Explore Public Snippets

Found 62k snippets matching: time

    public by cghersi  328013  0  6  1

    Retrieve useful information regarding indexes in MS SQL Server

    This script helps the DBA retrieve valuable information about indexes. Here is the explanation of the output fields: object_type: type of object (either "U" for user-defined table or "V" for view); database_name: name of the database; schema_name: name of the schema; object_name: name of the object; create_date: date the object was created; …
    SET TRANSACTION ISOLATION LEVEL READ UNCOMMITTED
    SET NOCOUNT ON
    SET ANSI_WARNINGS OFF
    SET ARITHABORT OFF
    SET ARITHIGNORE ON
    SET TEXTSIZE 2147483647
    
    
    -----------------------------------------------------------------------------------------------------------------------------
    --	Declarations / Sets: Declare And Set Variables
    -----------------------------------------------------------------------------------------------------------------------------
    
    DECLARE @Database_ID AS SMALLINT
    
    
    SET @Database_ID = DB_ID ()
    
    
    -----------------------------------------------------------------------------------------------------------------------------
    --	Error Trapping: Check If Temp Table(s) Already Exist(s) And Drop If Applicable
    -----------------------------------------------------------------------------------------------------------------------------
    
    IF OBJECT_ID (N'tempdb.dbo.#temp_index_breakdown_keys_filters', N'U') IS NOT NULL
    BEGIN
    
    	DROP TABLE dbo.#temp_index_breakdown_keys_filters
    
    END
    
    
    IF OBJECT_ID (N'tempdb.dbo.#temp_index_breakdown_size_info', N'U') IS NOT NULL
    BEGIN
    
    	DROP TABLE dbo.#temp_index_breakdown_size_info
    
    END
    
    
    -----------------------------------------------------------------------------------------------------------------------------
    --	Table Insert: Insert Index Key, Include Key, And Filter Definition Values Into Temp Table
    -----------------------------------------------------------------------------------------------------------------------------
    
    SELECT
    	 sqI.[object_id]
    	,sqI.index_id
    	,STUFF (CONVERT (NVARCHAR (MAX), sqI.index_key), 1, 2, N'') AS index_key
    	,STUFF (CONVERT (NVARCHAR (MAX), sqI.include_key), 1, 2, N'') AS include_key
    	,sqI.filter_definition
    	,RANK () OVER
    				(
    					ORDER BY
    						 sqI.[object_id]
    						,CONVERT (NVARCHAR (MAX), sqI.index_key)
    						,sqI.filter_definition
    				) AS dupe_rank
    INTO
    	dbo.#temp_index_breakdown_keys_filters
    FROM
    
    	(
    		SELECT
    			 I.[object_id]
    			,I.index_id
    			,(
    				SELECT
    					  N', '
    					+ C.name
    					+ N' • '
    					+ TYPE_NAME (C.user_type_id)
    					+ ISNULL ((N': [ ' + (CASE
    											WHEN C.system_type_id <> C.user_type_id THEN LOWER (TYPE_NAME (C.system_type_id))
    											END) + N' ]'), N'')
    					+ N' '
    					+ (CASE
    							WHEN TY.name NOT IN (N'bigint', N'bit', N'date', N'datetime', N'datetime2', N'datetimeoffset', N'decimal', N'float', N'int', N'money', N'numeric', N'real', N'smalldatetime', N'smallint', N'smallmoney', N'time', N'tinyint') THEN CONVERT (NVARCHAR (30), C.max_length)
    							ELSE CONVERT (NVARCHAR (30), C.max_length) + N' (' + CONVERT (NVARCHAR (30), COLUMNPROPERTY (C.[object_id], C.name, 'Precision')) + N',' + ISNULL (CONVERT (NVARCHAR (30), COLUMNPROPERTY (C.[object_id], C.name, 'Scale')), 0) + N')'
    							END)
    					+ N' '
    					+ (CASE
    							WHEN IC.is_descending_key = 0 THEN N'[A]'
    							WHEN IC.is_descending_key = 1 THEN N'[D]'
    							ELSE N'[N/A]'
    							END) AS [text()]
    				FROM
    					sys.index_columns IC
    					INNER JOIN sys.columns C ON C.[object_id] = IC.[object_id]
    						AND C.column_id = IC.column_id
    					INNER JOIN sys.types TY ON TY.user_type_id = C.user_type_id
    				WHERE
    					IC.is_included_column = 0
    					AND IC.[object_id] = I.[object_id]
    					AND IC.index_id = I.index_id
    				ORDER BY
    					IC.key_ordinal
    				FOR
    					 XML PATH ('')
    					,TYPE
    			 ) AS index_key
    			,(
    				SELECT
    					  N', '
    					+ C.name
    					+ N' • '
    					+ TYPE_NAME (C.user_type_id)
    					+ ISNULL ((N': [ ' + (CASE
    											WHEN C.system_type_id <> C.user_type_id THEN LOWER (TYPE_NAME (C.system_type_id))
    											END) + N' ]'), N'')
    					+ N' '
    					+ (CASE
    							WHEN TY.name NOT IN (N'bigint', N'bit', N'date', N'datetime', N'datetime2', N'datetimeoffset', N'decimal', N'float', N'int', N'money', N'numeric', N'real', N'smalldatetime', N'smallint', N'smallmoney', N'time', N'tinyint') THEN CONVERT (NVARCHAR (30), C.max_length)
    							ELSE CONVERT (NVARCHAR (30), C.max_length) + N' (' + CONVERT (NVARCHAR (30), COLUMNPROPERTY (C.[object_id], C.name, 'Precision')) + N',' + ISNULL (CONVERT (NVARCHAR (30), COLUMNPROPERTY (C.[object_id], C.name, 'Scale')), 0) + N')'
    							END) AS [text()]
    				FROM
    					sys.index_columns IC
    					INNER JOIN sys.columns C ON C.[object_id] = IC.[object_id]
    						AND C.column_id = IC.column_id
    					INNER JOIN sys.types TY ON TY.user_type_id = C.user_type_id
    				WHERE
    					IC.is_included_column = 1
    					AND IC.[object_id] = I.[object_id]
    					AND IC.index_id = I.index_id
    				ORDER BY
    					IC.key_ordinal
    				FOR
    					 XML PATH ('')
    					,TYPE
    			 ) AS include_key
    			,I.filter_definition
    		FROM
    			sys.indexes I
    	) sqI
    
    
    -----------------------------------------------------------------------------------------------------------------------------
    --	Table Insert: Insert Size Values Into Temp Table
    -----------------------------------------------------------------------------------------------------------------------------
    
    SELECT
    	 DDPS.[object_id]
    	,DDPS.index_id
    	,SUM (CASE
    			WHEN DDPS.index_id < 2 THEN DDPS.row_count
    			END) AS [rows]
    	,SUM (DDPS.reserved_page_count) AS total_pages
    	,SUM (DDPS.used_page_count) AS used_pages
    	,SUM (CASE
    			WHEN DDPS.index_id < 2 THEN DDPS.in_row_data_page_count + DDPS.lob_used_page_count + DDPS.row_overflow_used_page_count
    			ELSE DDPS.lob_used_page_count + DDPS.row_overflow_used_page_count
    			END) AS data_pages
    INTO
    	dbo.#temp_index_breakdown_size_info
    FROM
    	sys.dm_db_partition_stats DDPS
    GROUP BY
    	 DDPS.[object_id]
    	,DDPS.index_id
    
    
    -----------------------------------------------------------------------------------------------------------------------------
    --	Main Query: Final Display / Output
    -----------------------------------------------------------------------------------------------------------------------------
    
    SELECT
    	 (CASE
    		WHEN sqBAQ.row_filter = 1 THEN sqBAQ.[type]
    		ELSE ''
    		END) AS object_type
    	,(CASE
    		WHEN sqBAQ.row_filter = 1 THEN DB_NAME ()
    		ELSE ''
    		END) AS [database_name]
    	,(CASE
    		WHEN sqBAQ.row_filter = 1 THEN SCHEMA_NAME (sqBAQ.[schema_id])
    		ELSE ''
    		END) AS [schema_name]
    	,(CASE
    		WHEN sqBAQ.row_filter = 1 THEN OBJECT_NAME (sqBAQ.[object_id])
    		ELSE ''
    		END) AS [object_name]
    	,(CASE
    		WHEN sqBAQ.row_filter = 1 THEN sqBAQ.create_date
    		ELSE ''
    		END) AS create_date
    	,(CASE
    		WHEN sqBAQ.row_filter = 1 THEN caMDKL.modify_date
    		ELSE ''
    		END) AS modify_date
    	,(CASE
    		WHEN sqBAQ.row_filter = 1 THEN REVERSE (SUBSTRING (REVERSE (CONVERT (VARCHAR (25), CONVERT (MONEY, sqBAQ.[rows]), 1)), 4, 22))
    		ELSE ''
    		END) AS [rows]
    	,(CASE
    		WHEN sqBAQ.row_filter = 1 THEN CONVERT (VARCHAR (25), CONVERT (MONEY, (sqBAQ.total_pages * 8) / 1024.0), 1)
    		ELSE ''
    		END) AS total_mb
    	,(CASE
    		WHEN sqBAQ.row_filter = 1 THEN CONVERT (VARCHAR (25), CONVERT (MONEY, (sqBAQ.used_pages * 8) / 1024.0), 1)
    		ELSE ''
    		END) AS used_mb
    	,(CASE
    		WHEN sqBAQ.row_filter = 1 THEN CONVERT (VARCHAR (25), CONVERT (MONEY, (sqBAQ.unused_pages * 8) / 1024.0), 1)
    		ELSE ''
    		END) AS unused_mb
    	,(CASE
    		WHEN sqBAQ.row_filter = 1 THEN CONVERT (VARCHAR (25), CONVERT (MONEY, (sqBAQ.data_pages * 8) / 1024.0), 1)
    		ELSE ''
    		END) AS data_mb
    	,(CASE
    		WHEN sqBAQ.row_filter = 1 THEN CONVERT (VARCHAR (25), CONVERT (MONEY, (sqBAQ.index_pages * 8) / 1024.0), 1)
    		ELSE ''
    		END) AS index_mb
    	,(CASE
    		WHEN sqBAQ.row_filter = 1 THEN CONVERT (VARCHAR (6), CONVERT (DECIMAL (5, 2), ISNULL (((sqBAQ.data_pages + .0) / sqBAQ.used_pages) * 100, 0)))
    		ELSE ''
    		END) AS pct_data
    	,(CASE
    		WHEN sqBAQ.row_filter = 1 THEN CONVERT (VARCHAR (6), CONVERT (DECIMAL (5, 2), ISNULL (((sqBAQ.index_pages + .0) / sqBAQ.used_pages) * 100, 0)))
    		ELSE ''
    		END) AS pct_index
    	,sqBAQ.type_desc AS index_type
    	,ISNULL (sqBAQ.index_name, '') AS index_name
    	,(CASE
    		WHEN sqBAQ.type_desc = N'HEAP' THEN ''
    		WHEN sqBAQ.is_primary_key = 0 AND sqBAQ.is_unique = 0 THEN REPLICATE ('.', 6)
    		WHEN sqBAQ.is_system_named = 0 THEN 'No'
    		WHEN sqBAQ.is_system_named = 1 THEN 'Yes'
    		ELSE ''
    		END) AS system_named
    	,(CASE
    		WHEN sqBAQ.is_primary_key = 1 THEN 'Yes'
    		ELSE ''
    		END) AS is_pk
    	,(CASE
    		WHEN sqBAQ.is_unique_constraint = 1 THEN 'C'
    		WHEN sqBAQ.is_unique = 1 THEN 'I'
    		ELSE ''
    		END) AS [unique]
    	,(CASE
    		WHEN sqBAQ.is_disabled = 1 THEN 'Yes'
    		ELSE ''
    		END) AS [disabled]
    	,(CASE
    		WHEN sqBAQ.is_hypothetical = 1 THEN 'Yes'
    		ELSE ''
    		END) AS hypothetical
    	,REVERSE (SUBSTRING (REVERSE (CONVERT (VARCHAR (23), CONVERT (MONEY, sqCC.total_columns), 1)), 4, 23)) AS total_columns
    	,(CASE
    		WHEN sqBAQ.type_desc = N'HEAP' THEN ''
    		ELSE REVERSE (SUBSTRING (REVERSE (CONVERT (VARCHAR (23), CONVERT (MONEY, caMDKL.[index_columns]), 1)), 4, 23))
    		END) AS [index_columns]
    	,(CASE
    		WHEN sqBAQ.type_desc = N'HEAP' THEN ''
    		ELSE REVERSE (SUBSTRING (REVERSE (CONVERT (VARCHAR (23), CONVERT (MONEY, caMDKL.include_columns), 1)), 4, 23))
    		END) AS include_columns
    	,(CASE
    		WHEN sqBAQ.type_desc = N'HEAP' THEN ''
    		ELSE REVERSE (SUBSTRING (REVERSE (CONVERT (VARCHAR (23), CONVERT (MONEY, (caMDKL.[index_columns] / sqCC.total_columns) * 100), 1)), 1, 23))
    		END) AS index_pct_of_columns
    	,(CASE
    		WHEN sqBAQ.type_desc = N'HEAP' THEN ''
    		ELSE REVERSE (SUBSTRING (REVERSE (CONVERT (VARCHAR (23), CONVERT (MONEY, (caMDKL.include_columns / sqCC.total_columns) * 100), 1)), 1, 23))
    		END) AS include_pct_of_columns
    	,(CASE
    		WHEN sqBAQ.type_desc = N'HEAP' THEN ''
    		ELSE REVERSE (SUBSTRING (REVERSE (CONVERT (VARCHAR (23), CONVERT (MONEY, ((caMDKL.[index_columns] + caMDKL.include_columns) / sqCC.total_columns) * 100), 1)), 1, 23))
    		END) AS total_pct_of_columns
    	,CONVERT (VARCHAR (25), CONVERT (MONEY, (ISNULL (sqBAQ.individual_index_pages, 0) * 8) / 1024.0), 1) AS key_mb
    	,CONVERT (VARCHAR (6), CONVERT (DECIMAL (5, 2), ISNULL (((sqBAQ.individual_index_pages + .0) / sqBAQ.index_pages) * 100, 0))) AS key_mb_pct
    	,(CASE
    		WHEN sqBAQ.type_desc = N'HEAP' THEN ''
    		ELSE REVERSE (SUBSTRING (REVERSE (CONVERT (VARCHAR (25), CONVERT (MONEY, sqKL.total_max_length), 1)), 4, 22))
    		END) AS max_key_size_bytes
    	,(CASE
    		WHEN sqKL.total_max_length > 900 THEN 'Yes'
    		ELSE ''
    		END) AS over_key_size_limit
    	,ISNULL (ttIBKF.index_key, N'') AS index_key
    	,ISNULL (ttIBKF.include_key, N'') AS include_key
    	,ISNULL (ttIBKF.filter_definition, N'') AS filter_definition
    	,(CASE
    		WHEN sqED02.dupe_id IS NOT NULL THEN CONVERT (VARCHAR (20), sqED02.dupe_id) + ' - [' + CONVERT (VARCHAR (11), sqED02.total_dupes) + ']'
    		ELSE ''
    		END) AS dupe_id
    	,sqBAQ.is_unused AS unused
    	,ISNULL (CONVERT (VARCHAR (10), STATS_DATE (sqBAQ.[object_id], sqBAQ.index_id), 23), '') AS statistics_date
    	,(CASE
    		WHEN sqBAQ.[allow_row_locks] = 0 THEN 'No'
    		WHEN sqBAQ.[allow_row_locks] = 1 THEN 'Yes'
    		ELSE ''
    		END) AS row_locks
    	,(CASE
    		WHEN sqBAQ.[allow_page_locks] = 0 THEN 'No'
    		WHEN sqBAQ.[allow_page_locks] = 1 THEN 'Yes'
    		ELSE ''
    		END) AS page_locks
    	,(CASE
    		WHEN sqBAQ.[ignore_dup_key] = 0 THEN 'No'
    		WHEN sqBAQ.[ignore_dup_key] = 1 THEN 'Yes'
    		ELSE ''
    		END) AS ignore_dupes
    	,(CASE
    		WHEN sqBAQ.no_recompute = 0 THEN 'Yes'
    		WHEN sqBAQ.no_recompute = 1 THEN 'No'
    		ELSE ''
    		END) AS auto_stats
    	,(CASE
    		WHEN sqBAQ.is_padded = 0 THEN 'No'
    		WHEN sqBAQ.is_padded = 1 THEN 'Yes'
    		ELSE ''
    		END) AS padded
    	,(CASE
    		WHEN sqBAQ.fill_factor = 0 THEN 100
    		ELSE sqBAQ.fill_factor
    		END) AS fill_factor
    	,(CASE
    		WHEN sqBAQ.user_seeks > 0 THEN CONVERT (VARCHAR (20), sqBAQ.user_seeks)
    		ELSE ''
    		END) AS user_seeks
    	,(CASE
    		WHEN sqBAQ.user_scans > 0 THEN CONVERT (VARCHAR (20), sqBAQ.user_scans)
    		ELSE ''
    		END) AS user_scans
    	,(CASE
    		WHEN sqBAQ.user_lookups > 0 THEN CONVERT (VARCHAR (20), sqBAQ.user_lookups)
    		ELSE ''
    		END) AS user_lookups
    	,(CASE
    		WHEN sqBAQ.user_updates > 0 THEN CONVERT (VARCHAR (20), sqBAQ.user_updates)
    		ELSE ''
    		END) AS user_updates
    	,ISNULL (CONVERT (VARCHAR (10), sqBAQ.last_user_seek, 23), '') AS last_user_seek
    	,ISNULL (CONVERT (VARCHAR (10), sqBAQ.last_user_scan, 23), '') AS last_user_scan
    	,ISNULL (CONVERT (VARCHAR (10), sqBAQ.last_user_lookup, 23), '') AS last_user_lookup
    	,ISNULL (CONVERT (VARCHAR (10), sqBAQ.last_user_update, 23), '') AS last_user_update
    	,(CASE
    		WHEN sqBAQ.system_seeks > 0 THEN CONVERT (VARCHAR (20), sqBAQ.system_seeks)
    		ELSE ''
    		END) AS system_seeks
    	,(CASE
    		WHEN sqBAQ.system_scans > 0 THEN CONVERT (VARCHAR (20), sqBAQ.system_scans)
    		ELSE ''
    		END) AS system_scans
    	,(CASE
    		WHEN sqBAQ.system_lookups > 0 THEN CONVERT (VARCHAR (20), sqBAQ.system_lookups)
    		ELSE ''
    		END) AS system_lookups
    	,(CASE
    		WHEN sqBAQ.system_updates > 0 THEN CONVERT (VARCHAR (20), sqBAQ.system_updates)
    		ELSE ''
    		END) AS system_updates
    	,ISNULL (CONVERT (VARCHAR (10), sqBAQ.last_system_seek, 23), '') AS last_system_seek
    	,ISNULL (CONVERT (VARCHAR (10), sqBAQ.last_system_scan, 23), '') AS last_system_scan
    	,ISNULL (CONVERT (VARCHAR (10), sqBAQ.last_system_lookup, 23), '') AS last_system_lookup
    	,ISNULL (CONVERT (VARCHAR (10), sqBAQ.last_system_update, 23), '') AS last_system_update
    FROM
    
    	(
    		SELECT
    			 O.[type]
    			,O.[schema_id]
    			,O.[object_id]
    			,CONVERT (VARCHAR (10), O.create_date, 23) AS create_date
    			,CONVERT (VARCHAR (10), O.modify_date, 23) AS modify_date
    			,sqDDPS.[rows]
    			,sqDDPS.total_pages
    			,sqDDPS.used_pages
    			,(CASE
    				WHEN sqDDPS.total_pages > sqDDPS.used_pages THEN sqDDPS.total_pages - sqDDPS.used_pages
    				ELSE 0
    				END) AS unused_pages
    			,sqDDPS.data_pages
    			,(CASE
    				WHEN sqDDPS.used_pages > sqDDPS.data_pages THEN sqDDPS.used_pages - sqDDPS.data_pages
    				ELSE 0
    				END) AS index_pages
    			,sqI.type_desc
    			,sqI.name AS index_name
    			,sqI.is_system_named
    			,sqI.is_primary_key
    			,sqI.is_unique
    			,sqI.is_disabled
    			,sqI.is_hypothetical
    			,sqI.individual_index_pages
    			,sqI.is_unused
    			,sqI.[allow_row_locks]
    			,sqI.[allow_page_locks]
    			,sqI.[ignore_dup_key]
    			,sqI.no_recompute
    			,sqI.is_padded
    			,sqI.fill_factor
    			,sqI.user_seeks
    			,sqI.user_scans
    			,sqI.user_lookups
    			,sqI.user_updates
    			,sqI.last_user_seek
    			,sqI.last_user_scan
    			,sqI.last_user_lookup
    			,sqI.last_user_update
    			,sqI.system_seeks
    			,sqI.system_scans
    			,sqI.system_lookups
    			,sqI.system_updates
    			,sqI.last_system_seek
    			,sqI.last_system_scan
    			,sqI.last_system_lookup
    			,sqI.last_system_update
    			,sqI.is_unique_constraint
    			,sqI.index_id
    			,sqI.row_filter
    		FROM
    			sys.objects O
    			INNER JOIN
    
    				(
    					SELECT
    						 ttIBSI.[object_id]
    						,SUM (ttIBSI.[rows]) AS [rows]
    						,SUM (ttIBSI.total_pages) AS total_pages
    						,SUM (ttIBSI.used_pages) AS used_pages
    						,SUM (ttIBSI.data_pages) AS data_pages
    					FROM
    						dbo.#temp_index_breakdown_size_info ttIBSI
    					GROUP BY
    						ttIBSI.[object_id]
    				) sqDDPS ON sqDDPS.[object_id] = O.[object_id]
    
    			INNER JOIN
    
    				(
    					SELECT
    						 I.[object_id]
    						,I.type_desc
    						,I.name
    						,KC.is_system_named
    						,I.is_primary_key
    						,I.is_unique
    						,I.is_disabled
    						,I.is_hypothetical
    						,(CASE
    							WHEN ttIBSI.used_pages > ttIBSI.data_pages THEN ttIBSI.used_pages - ttIBSI.data_pages
    							END) AS individual_index_pages
    						,(CASE
    							WHEN I.[type] = 0 THEN ''
    							WHEN I.[type] = 1 THEN REPLICATE ('.', 6)
    							WHEN I.is_primary_key = 1 THEN REPLICATE ('.', 6)
    							WHEN I.is_unique = 1 THEN REPLICATE ('.', 6)
    							WHEN EXISTS
    
    								(
    									SELECT
    										*
    									FROM
    										sys.index_columns IC
    										INNER JOIN sys.foreign_key_columns FKC ON FKC.parent_object_id = IC.[object_id]
    											AND FKC.parent_column_id = IC.column_id
    									WHERE
    										IC.[object_id] = I.[object_id]
    										AND IC.index_id = I.index_id
    								) THEN REPLICATE ('.', 6)
    
    							WHEN DDIUS.[object_id] IS NOT NULL THEN (CASE
    																		WHEN DDIUS.user_seeks + DDIUS.user_scans + DDIUS.user_lookups + DDIUS.user_updates = 0 THEN 'Y/N'
    																		ELSE 'No'
    																		END)
    							ELSE 'Yes'
    							END) AS is_unused
    						,I.[allow_row_locks]
    						,I.[allow_page_locks]
    						,I.[ignore_dup_key]
    						,S.no_recompute
    						,I.is_padded
    						,I.fill_factor
    						,DDIUS.user_seeks
    						,DDIUS.user_scans
    						,DDIUS.user_lookups
    						,DDIUS.user_updates
    						,DDIUS.last_user_seek
    						,DDIUS.last_user_scan
    						,DDIUS.last_user_lookup
    						,DDIUS.last_user_update
    						,DDIUS.system_seeks
    						,DDIUS.system_scans
    						,DDIUS.system_lookups
    						,DDIUS.system_updates
    						,DDIUS.last_system_seek
    						,DDIUS.last_system_scan
    						,DDIUS.last_system_lookup
    						,DDIUS.last_system_update
    						,I.is_unique_constraint
    						,I.index_id
    						,ROW_NUMBER () OVER
    											(
    												PARTITION BY
    													I.[object_id]
    												ORDER BY
    													 I.is_primary_key DESC
    													,(CASE
    														WHEN I.[type] = 0 THEN 'Z'
    														ELSE 'A'
    														END)
    													,I.[type]
    													,I.name
    											) AS row_filter
    					FROM
    						sys.indexes I
    						INNER JOIN dbo.#temp_index_breakdown_size_info ttIBSI ON ttIBSI.[object_id] = I.[object_id]
    							AND ttIBSI.index_id = I.index_id
    						LEFT JOIN sys.key_constraints KC ON KC.parent_object_id = I.[object_id]
    							AND KC.unique_index_id = I.index_id
    						LEFT JOIN sys.stats S ON S.[object_id] = I.[object_id]
    							AND S.stats_id = I.index_id
    						LEFT JOIN master.sys.dm_db_index_usage_stats DDIUS ON DDIUS.[object_id] = I.[object_id]
    							AND DDIUS.index_id = I.index_id
    							AND DDIUS.database_id = @Database_ID
    				) sqI ON sqI.[object_id] = O.[object_id]
    
    		WHERE
    			O.[type] IN ('U', 'V')
    			AND O.is_ms_shipped = 0
    			AND NOT
    
    				(
    					SCHEMA_NAME (O.[schema_id]) = N'dbo'
    					AND O.name = N'sysdiagrams'
    					AND O.[type] = 'U'
    				)
    
    	) sqBAQ
    
    	INNER JOIN
    
    		(
    			SELECT
    				 C.[object_id]
    				,COUNT (*) + .0 AS total_columns
    			FROM
    				sys.columns C
    			GROUP BY
    				C.[object_id]
    		) sqCC ON sqCC.[object_id] = sqBAQ.[object_id]
    
    	LEFT JOIN dbo.#temp_index_breakdown_keys_filters ttIBKF ON ttIBKF.[object_id] = sqBAQ.[object_id]
    		AND ttIBKF.index_id = sqBAQ.index_id
    	LEFT JOIN
    
    		(
    			SELECT
    				 IC.[object_id]
    				,IC.index_id
    				,SUM (C.max_length) AS total_max_length
    			FROM
    				sys.index_columns IC
    				INNER JOIN sys.columns C ON C.[object_id] = IC.[object_id]
    					AND C.column_id = IC.column_id
    			WHERE
    				IC.is_included_column = 0
    			GROUP BY
    				 IC.[object_id]
    				,IC.index_id
    		) sqKL ON sqKL.[object_id] = sqBAQ.[object_id] AND sqKL.index_id = sqBAQ.index_id
    
    	LEFT JOIN
    
    		(
    			SELECT
    				 sqED01.dupe_rank
    				,sqED01.total_dupes
    				,ROW_NUMBER () OVER
    									(
    										ORDER BY
    											(SELECT NULL)
    									) AS dupe_id
    			FROM
    
    				(
    					SELECT
    						 ttIBKF.dupe_rank
    						,COUNT (*) AS total_dupes
    					FROM
    						dbo.#temp_index_breakdown_keys_filters ttIBKF
    					GROUP BY
    						ttIBKF.dupe_rank
    					HAVING
    						COUNT (*) > 1
    				) sqED01
    
    		) sqED02 ON sqED02.dupe_rank = ttIBKF.dupe_rank
    
    	CROSS APPLY
    
    		(
    			SELECT
    				 (CASE
    					WHEN sqBAQ.modify_date = sqBAQ.create_date THEN REPLICATE ('.', 18)
    					ELSE sqBAQ.modify_date
    					END) AS modify_date
    				,LEN (ttIBKF.index_key) - LEN (REPLACE (ttIBKF.index_key, '•', '')) AS [index_columns]
    				,ISNULL (LEN (ttIBKF.include_key) - LEN (REPLACE (ttIBKF.include_key, '•', '')), 0) AS include_columns
    		) caMDKL
    
    ORDER BY
    	 sqBAQ.[type]
    	,SCHEMA_NAME (sqBAQ.[schema_id])
    	,OBJECT_NAME (sqBAQ.[object_id])
    	,sqBAQ.row_filter
    
    
    -----------------------------------------------------------------------------------------------------------------------------
    --	Cleanup: Drop Any Remaining Temp Tables
    -----------------------------------------------------------------------------------------------------------------------------
    
    IF OBJECT_ID (N'tempdb.dbo.#temp_index_breakdown_keys_filters', N'U') IS NOT NULL
    BEGIN
    
    	DROP TABLE dbo.#temp_index_breakdown_keys_filters
    
    END
    
    
    IF OBJECT_ID (N'tempdb.dbo.#temp_index_breakdown_size_info', N'U') IS NOT NULL
    BEGIN
    
    	DROP TABLE dbo.#temp_index_breakdown_size_info
    
    END
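
    If you want to capture this report outside of SQL Server Management Studio, a minimal Python sketch along these lines could run the script and export the final result set to CSV. This is an assumption-laden sketch: the file name index_breakdown.sql, the server/database names, and the ODBC driver string are placeholders, not part of the original snippet.

    # Minimal sketch: run the index-breakdown script and export its result set to CSV.
    import csv
    import pyodbc

    # Hypothetical connection details; adjust the server, database, and driver name.
    conn = pyodbc.connect(
        "DRIVER={ODBC Driver 17 for SQL Server};"
        "SERVER=localhost;DATABASE=MyDatabase;Trusted_Connection=yes"
    )
    with open("index_breakdown.sql", encoding="utf-8") as f:
        sql = f.read()

    cursor = conn.cursor()
    cursor.execute(sql)
    # With SET NOCOUNT ON the batch should surface only the final SELECT; if the
    # driver returns empty result sets first, skip ahead to the one with columns.
    while cursor.description is None and cursor.nextset():
        pass

    with open("index_breakdown.csv", "w", newline="") as out:
        writer = csv.writer(out)
        writer.writerow([column[0] for column in cursor.description])
        writer.writerows(cursor.fetchall())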

    public by mporru @ Amazon_AWS_PHP_API  320875  0  5  0

    Amazon AWS - PHP: Generate signed expiring download URL

    This snippet demonstrates how to generate a download URL for a file stored in your S3 bucket. This works for all files, even ACL-private files. The download URL will be active for the specified time. In order to use this service you must enable Amazon Simple Storage Service (Amazon S3) in your account.
    <?php
    require_once('sdk.class.php');
    $s3 = new AmazonS3();
    
    $fileName = 'test1.txt';
    $bucketName = 'myBucketName';
    
    $url = $s3->get_object_url($bucketName, $fileName, '1 hour');
    
    echo("Download url: <a href=\"$url\">$url</a>\n");
    ?>
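
    The snippet above targets the legacy AWS SDK for PHP v1 (sdk.class.php). For comparison, here is a minimal sketch of the same idea in Python with boto3; the bucket and key names are just the example values from the snippet, and credentials are assumed to be configured in the environment.

    # Minimal sketch: generate a pre-signed, expiring download URL with boto3.
    import boto3

    s3 = boto3.client("s3")
    url = s3.generate_presigned_url(
        "get_object",
        Params={"Bucket": "myBucketName", "Key": "test1.txt"},
        ExpiresIn=3600,  # one hour, like the '1 hour' argument in the PHP snippet
    )
    print("Download url:", url)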

    public by phuonght  212092  0  3  0

    A small script to make recording HTTP Live Streams (HLS, the streams that work on iOS devices) nicer on a Mac.

    The script records the stream for a defined period of time and sends the user notifications if anything goes wrong, and again once it is done: HLS_dvr.sh
    # required: ffmpeg (e.g. from homebrew), terminal-notifier from https://github.com/alloy/terminal-notifier 
    # you can schedule this with launchd to run e.g. weekly
    
    # Specify in seconds how long the script should record (default here is 1 hour).
    seconds=3600
    
    # Date format for the recording file name
    DATE=`date "+%d-%m-%y_%H-%M"`
    
    # start ffmpeg recording
    ffmpeg -re -i http://website.com/playlist.m3u8 -c copy -bsf:a aac_adtstoasc recording_$DATE.mp4 &
    
    # notification that recording has started
    if [ "$(pgrep -P $$ 'ffmpeg')" ]
    then
    	/Applications/terminal-notifier.app/Contents/MacOS/terminal-notifier -title 'ffmpeg' -message "is recording now" -sender 'com.apple.Terminal'
    else
    	/Applications/terminal-notifier.app/Contents/MacOS/terminal-notifier -title 'ffmpeg' -message "is not recording!" -sound Funk -sender 'com.apple.Terminal'
    	exit 42
    fi
    
    # check every 30 seconds for $seconds to make sure ffmpeg is still running
    START=`date +%s`
    while [ $(( $(date +%s) - $seconds )) -lt $START ]; do
    	if [ -z "$(pgrep -P $$ 'ffmpeg')" ]
    	then
    		/Applications/terminal-notifier.app/Contents/MacOS/terminal-notifier -title 'ffmpeg' -message "is no longer running" -sound Funk -sender 'com.apple.Terminal'
    	fi
    	sleep 30
    done
    
    # notification when time is up
    /Applications/terminal-notifier.app/Contents/MacOS/terminal-notifier -title 'ffmpeg' -message "recording finished" -sound default -sender 'com.apple.Terminal'
    
    # stop ffmpeg (using this because stopping ffmpeg via -t for duration turned out to be extremely unreliable)
    kill $(pgrep -P $$ 'ffmpeg')
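
    For reference, the record-then-watch logic of the shell script can be sketched in Python with subprocess; the stream URL, output name, and timings below are the same placeholders used above, and the macOS notification calls are omitted.

    # Minimal sketch of the script's logic: start ffmpeg, watch it, stop it when time is up.
    import subprocess
    import time
    from datetime import datetime

    seconds = 3600  # how long to record
    outfile = "recording_{}.mp4".format(datetime.now().strftime("%d-%m-%y_%H-%M"))

    proc = subprocess.Popen([
        "ffmpeg", "-re", "-i", "http://website.com/playlist.m3u8",
        "-c", "copy", "-bsf:a", "aac_adtstoasc", outfile,
    ])

    start = time.time()
    while time.time() - start < seconds:
        if proc.poll() is not None:   # ffmpeg exited early
            print("ffmpeg is no longer running")
            break
        time.sleep(30)                # check every 30 seconds
    else:
        proc.terminate()              # time is up, stop the recording
        proc.wait()
        print("recording finished")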
    
    
    

    public by ejmurray  249965  16  3  0

    Photo management script: copy photos from "~/Pictures/iPhone Incoming" into a year/month folder tree with timestamped file names.

    This script copies photos from "~/Pictures/iPhone Incoming" into a tree it creates, with folders representing years and months, and photo names timestamped. Completely based on the work of the amazing Dr. Drang; see here: http://www.leancrew.com/all-this/2013/10/photo-management-via-the-finder/
    #!/usr/bin/python
    
    import sys
    import os, shutil
    import subprocess
    import os.path
    from datetime import datetime
    
    ######################## Functions #########################
    
    def photoDate(f):
      "Return the date/time on which the given photo was taken."
    
      cDate = subprocess.check_output(['sips', '-g', 'creation', f])
      cDate = cDate.split('\n')[1].lstrip().split(': ')[1]
      return datetime.strptime(cDate, "%Y:%m:%d %H:%M:%S")
    
    
    ###################### Main program ########################
    
    # Where the photos are and where they're going.
    sourceDir = os.environ['HOME'] + '/Pictures/iPhone Incoming'
    destDir = os.environ['HOME'] + '/Pictures/iPhone'
    errorDir = destDir + '/Unsorted/'
    
    # The format for the new file names.
    fmt = "%Y-%m-%d %H-%M-%S"
    
    # The problem files.
    problems = []
    
    # Get all the JPEGs in the source folder.
    photos = os.listdir(sourceDir)
    photos = [ x for x in photos if x[-4:] == '.jpg' or x[-4:] == '.JPG' ]
    
    # Prepare to output as processing occurs
    lastMonth = 0
    lastYear = 0
    
    # Create the destination folder if necessary
    if not os.path.exists(destDir):
      os.makedirs(destDir)
    if not os.path.exists(errorDir):
      os.makedirs(errorDir)
    
    # Copy photos into year and month subfolders. Name the copies according to
    # their timestamps. If more than one photo has the same timestamp, add
    # suffixes 'a', 'b', etc. to the names. 
    for photo in photos:
      # print "Processing %s..." % photo
      original = sourceDir + '/' + photo
      suffix = 'a'
      try:
        pDate = photoDate(original)
        yr = pDate.year
        mo = pDate.month
    
        if not lastYear == yr or not lastMonth == mo:
          sys.stdout.write('\nProcessing %04d-%02d...' % (yr, mo))
          lastMonth = mo
          lastYear = yr
        else:
          sys.stdout.write('.')
        
        newname = pDate.strftime(fmt)
        thisDestDir = destDir + '/%04d/%02d' % (yr, mo)
        if not os.path.exists(thisDestDir):
          os.makedirs(thisDestDir)
    
        duplicate = thisDestDir + '/%s.jpg' % (newname)
        while os.path.exists(duplicate):
          newname = pDate.strftime(fmt) + suffix
          duplicate = destDir + '/%04d/%02d/%s.jpg' % (yr, mo, newname)
          suffix = chr(ord(suffix) + 1)
        shutil.copy2(original, duplicate)
      except Exception:
        shutil.copy2(original, errorDir + photo)
        problems.append(photo)
      except:
        sys.exit("Execution stopped.")
    
    # Report the problem files, if any.
    if len(problems) > 0:
      print "\nProblem files:"
      print "\n".join(problems)
      print "These can be found in: %s" % errorDir
    
    
    

    public by cghersi  173418  8  7  0

    Nice way to show the current time of day

    Returns a friendly description of the current time of day.
    public static string NiceCurrentTime()
    {  
      int hour = DateTime.Now.Hour;
    
      if (hour <= 5)
        return "this night oh yeah";
      else if (hour > 5 && hour <= 12)
        return "this morning";
      else if (hour > 12 && hour < 18)
        return "this afternoon";
      else 
        return "this evening";
    }

    public by skaggej  219408  2  6  0

    SharePoint 2010 - List All SharePoint PowerShell Cmdlets

    This is a quick and easy way to view and save all of the PowerShell cmdlets that are available for SharePoint. It's quite useful to have this list of commands handy. Once you know which command you want to run, it's time to learn how to use it with Get-Help.
    Get-Command -Module Microsoft.SharePoint.PowerShell | Select Name >> C:\AllSharePointPowerShellCmdlets.txt
    

    public by wassim  196354  0  5  0

    iOS: reload data with last update date/time

    How to update the last update date/time and hide the refresh control when the data is loaded in the table.
    
    - (void)reloadData
    {
        // Reload table data
        [self.tableView reloadData];
        
        // End the refreshing
        if (self.refreshControl) {
            
            NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
            [formatter setDateFormat:@"MMM d, h:mm a"];
            NSString *title = [NSString stringWithFormat:@"Last update: %@", [formatter stringFromDate:[NSDate date]]];
            NSDictionary *attrsDictionary = [NSDictionary dictionaryWithObject:[UIColor whiteColor]
                                                                        forKey:NSForegroundColorAttributeName];
            NSAttributedString *attributedTitle = [[NSAttributedString alloc] initWithString:title attributes:attrsDictionary];
            self.refreshControl.attributedTitle = attributedTitle;
            
            [self.refreshControl endRefreshing];
        }
    }

    public by cghersi @ MailUp API DEV  206842  8  6  1

    MailUp REST API - How to subscribe several contacts to a particular list with the "Confirmed Optin" procedure

    This snippet takes advantage of the MailUp .NET SDK to set up secure communication with the server through OAuth2 two-legged authentication. You can start digging into the MailUp API world by reading this content: http://help.mailup.com/display/mailupapi/REST+API An instance of the MailUpClient class is able to manage one user at a time. …
    public static void SubcribeRecipientsWithDoubleOptin(string clientID, string clientSecret, string username, string password)
    {
        // 1) Setup the MailUp Client:
        MailUpClient client = new MailUpClient(clientID, clientSecret);
        try
        {
            client.RetrieveAccessToken(username, password);
        }
        catch (MailUpException ex)
        {
            Console.WriteLine("Unable to access the service due to " + ex.Message);
            return;
        }
    
        // 2) Subscribe recipients:
        int idList = 1;
        List<ConsoleRecipientItem> recipients = new List<ConsoleRecipientItem>();
        ConsoleRecipientItem recipient1 = new ConsoleRecipientItem()
        {
            Email = "test1@mailup.com",
            Name = "First New Recipient" + DateTime.Now.ToFileTimeUtc(),
            MobileNumber = "3331234567",
            MobilePrefix = "39",
            Fields = new List<ConsoleRecipientDynamicFieldItem>()
            {
                new ConsoleRecipientDynamicFieldItem() { Id = 1, Value = "Name1_" + DateTime.Now.ToFileTimeUtc() },
                new ConsoleRecipientDynamicFieldItem() { Id = 2, Value = "LastName1_" + DateTime.Now.ToFileTimeUtc() }
                //here you can add all the fields you want to add...
            }
        };
        ConsoleRecipientItem recipient2 = new ConsoleRecipientItem()
        {
            Email = "test2@mailup.com",
            Name = "Second New Recipient" + DateTime.Now.ToFileTimeUtc(),
            MobileNumber = "3337654321",
            MobilePrefix = "39",
            Fields = new List<ConsoleRecipientDynamicFieldItem>()
            {
                new ConsoleRecipientDynamicFieldItem() { Id = 1, Value = "Name2_" + DateTime.Now.ToFileTimeUtc() },
                new ConsoleRecipientDynamicFieldItem() { Id = 2, Value = "LastName2_" + DateTime.Now.ToFileTimeUtc() }
                //here you can add all the fields you want to add...
            }
        };
        recipients.Add(recipient1);
        recipients.Add(recipient2);
        int subscriptionResult = 0;
        try
        {
            subscriptionResult = client.AddRecipientsToList(idList, recipients, true);
        }
        catch (Exception ex)
        {
            Console.WriteLine("Cannot perform the operation due to " + ex.Message);
            return;
        }
    
        // 2.1) wait for the end of import task:
        if (subscriptionResult > 0)
        {
            Console.WriteLine("Recipients Import started; import ID:" + subscriptionResult);
    
            //optionally we can check the import status:
            bool completed = false;
            do
            {
                ConsoleImportStatus importStatus = client.CheckImportStatus(subscriptionResult);
                if (importStatus == null)
                {
                    Console.WriteLine("Cannot check the import status for import task #" + subscriptionResult);
                    return;
                }
                completed = importStatus.Completed;
            } while (!completed);
            Console.WriteLine("Recipients Added!");
        }
        else
        {
            Console.WriteLine("An error occurred while adding the recipients.");
            return;
        }
    
        // 3) Retrieve the sendingID of the confirmation email related to the previous task:
        int sendingID = -1;
        try
        {
            sendingID = client.GetSendingIDForImport(subscriptionResult);
        }
        catch (Exception ex)
        {
            Console.WriteLine("Cannot retrieve sending ID due to " + ex.Message);
            return;
        }
        if (sendingID <= 0)
        {
            Console.WriteLine("Cannot retrieve sending ID.");
            return;
        }
    
        // 4) Send the confirmation email:
        EmailSendingItem sendingResult = null;
        try
        {
            sendingResult = client.SendEmailMessage(sendingID);
        }
        catch (Exception ex)
        {
            Console.WriteLine("Cannot send email due to " + ex.Message);
            return;
        }
        if (sendingResult == null)
            Console.WriteLine("Cannot send confirmation email.");
        else
            Console.WriteLine("Confirmation email sent!");
    }
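
    One thing to note: the do/while block above calls CheckImportStatus in a tight loop with no delay. A minimal, language-agnostic sketch of the same polling pattern with a pause and a timeout (check_import_status is a hypothetical stand-in for the SDK call, not part of the MailUp API):

    # Minimal sketch: poll a long-running import with a delay between checks.
    import time

    def wait_for_import(check_import_status, import_id, poll_seconds=5, timeout_seconds=600):
        """Poll an import until it completes, pausing between checks."""
        deadline = time.time() + timeout_seconds
        while time.time() < deadline:
            status = check_import_status(import_id)  # hypothetical stand-in for CheckImportStatus
            if status is not None and status.Completed:
                return True
            time.sleep(poll_seconds)  # avoid hammering the API
        return False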
                

    public by atucom  192939  0  3  0

    Automatically log speedtest results to a file with a timestamp.

    Runs speedtest-cli, parses the ping, download, and upload values, and appends them with a timestamp to a CSV file; intended to be scheduled with cron: lolbandwidth.py
    #!/usr/bin/python
    #stolen and modified from the reddit post about the raspbeery pi tweeting at comcast
    #run this every 10 minutes (or w/e) with cron:
    #"crontab -e"
    #*/10 * * * * /home/pi/lolbandwidth.py
    import os
    import sys
    import csv
    import datetime
    import time
    
    def test():
            #run speedtest-cli
            print 'running test'
            #install with "pip install speedtest-cli"
            csvlog = '/home/pi/speed_data.csv'
            a = os.popen("speedtest-cli --simple").read()
            print 'ran'
            #split the 3 line result (ping,down,up)
            lines = a.split('\n')
            print a
            ts = time.time()
            date =datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
            #if speedtest could not connect, record a high ping and zero speeds
            if "Cannot" in a:
                    p = 100
                    d = 0
                    u = 0
            #extract the values for ping down and up values
            else:
                    p = lines[0][6:11]
                    d = lines[1][10:14]
                    u = lines[2][8:12]
            print date,p, d, u
            #save the data to file for local network plotting
            out_file = open(csvlog, 'a')
            writer = csv.writer(out_file)
            writer.writerow((ts*1000,p,d,u))
            out_file.close()
            return
            
    if __name__ == '__main__':
            test()
            print 'completed'
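
    Since the script only appends rows to speed_data.csv, plotting is left to a separate step. A minimal sketch with pandas and matplotlib (assumed to be installed; the column names here are made up, matching the order written by writer.writerow() above):

    # Minimal sketch: plot the logged bandwidth history from speed_data.csv.
    import pandas as pd
    import matplotlib.pyplot as plt

    df = pd.read_csv("/home/pi/speed_data.csv",
                     names=["ts_ms", "ping_ms", "down_mbit", "up_mbit"])
    df["time"] = pd.to_datetime(df["ts_ms"], unit="ms")

    plt.plot(df["time"], df["down_mbit"], label="download")
    plt.plot(df["time"], df["up_mbit"], label="upload")
    plt.ylabel("Mbit/s")
    plt.legend()
    plt.show()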
    
    
    

    public by boloka  129805  1  5  0

    Process time

    How long does my PHP code take to run? Calculate the elapsed time. Very simple.
    PHP
    $start_time = microtime(true);
    
    // code...
    
    $end_time = microtime(true);
    
    $process_time = $end_time - $start_time;
    echo 'This code ran in: ' . $process_time . ' sec.';
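
    For comparison, the same elapsed-time pattern in Python would typically use a monotonic clock such as time.perf_counter() rather than wall-clock time; a minimal sketch:

    # Minimal sketch: time a block of code with a monotonic clock.
    import time

    start_time = time.perf_counter()

    # code...

    process_time = time.perf_counter() - start_time
    print("This code ran in: {:.6f} sec.".format(process_time))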