Tuesday, 18 March 2025
Converting a .NET DateTime to a DateTime2 in T-SQL
This article presents a handy T-SQL script that extracts a DateTime value stored by .NET in a numeric column and converts it into a SQL Server DateTime (DateTime2) value.
The T-SQL converts the value into a DateTime2 with SECOND precision.
An assumption here is that any numeric value larger than 100000000000 contains a DateTime value. This is an acceptable assumption when you log data, as very large values usually indicate a datetime value, but you might of course want additional checks beyond what the example T-SQL script shows.
Here is the T-SQL that shows how we can convert the .NET DateTime into a SQL DateTime.
-- Last updated: March 18, 2025
-- Synopsis: This script retrieves detailed change log information from the ObjectChanges, PropertyChanges, and ChangeSets tables.
-- It filters the results based on specific identifiers stored in a table variable, in this example GUIDs.
-- In this example T-SQL the FrameLog library is used to store the change log.
-- DateTime columns are retrieved by looking at the number of ticks elapsed since DateTime.MinValue,
-- as .NET DateTime columns are stored in SQL Server as this numeric value.
DECLARE @EXAMPLEGUIDS TABLE (Id NVARCHAR(36))
INSERT INTO @EXAMPLEGUIDS (Id)
VALUES
('1968126a-64c1-4d15-bf23-8cb8497dcaa9'),
('3e11aad8-95df-4377-ad63-c2fec3d43034'),
('acbdd116-b6a5-4425-907b-f86cb55aeedd') -- Tip: define which form GUIDs to fetch from the change log database tables that FrameLog uses. Each form GUID can be retrieved from the URL showing the form in MRS in the browser.
SELECT
o.Id as ObjectChanges_Id,
o.ObjectReference as ObjectReference,
o.TypeName as ObjectChanges_TypeName,
c.Id as Changeset_Id,
c.[Timestamp] as Changeset_Timestamp,
c.Author_UserName as Changeset_AuthorName,
p.[Id] as PropertyChanges_Id,
p.[PropertyName],
p.[Value],
p.[ValueAsInt],
CASE
    WHEN p.Value IS NOT NULL
    AND ISNUMERIC(p.[Value]) = 1
    AND CAST(p.[Value] AS decimal) > 100000000000
    THEN
        DATEADD(SECOND,
            CAST(CAST(p.[Value] AS decimal) / 10000000 AS BIGINT) % 60,
            DATEADD(MINUTE,
                CAST(CAST(p.[Value] AS decimal) / 10000000 / 60 AS BIGINT),
                CAST('0001-01-01' AS datetime2)
            )
        )
    ELSE NULL
END AS ValueAsDate,
o.ChangeType as ObjectChanges_ChangeTypeIfSet
FROM PropertyChanges p
LEFT OUTER JOIN ObjectChanges o on o.Id = p.ObjectChange_Id
LEFT OUTER JOIN ChangeSets c on o.ChangeSet_Id = c.Id
WHERE ObjectChange_Id in (
SELECT ObjectChanges.Id
FROM PropertyChanges
LEFT OUTER JOIN ObjectChanges on ObjectChanges.Id = PropertyChanges.ObjectChange_Id
LEFT OUTER JOIN ChangeSets on ObjectChanges.ChangeSet_Id = ChangeSets.Id
WHERE ObjectChange_Id in (SELECT Id FROM ObjectChanges where ObjectReference IN (
SELECT Id FROM @EXAMPLEGUIDS
))) -- Find the changesets where ObjectChange_Id equals the Id of the ObjectChanges rows whose ObjectReference equals one of the identifiers in @EXAMPLEGUIDS
ORDER BY ObjectReference, Changeset_Id DESC, Changeset_Timestamp DESC
This T-SQL is handy in case you come across DateTime columns from .NET that are saved as ticks in a numeric column, and it shows how we can do the conversion.
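As a quick sanity check of the ticks math, here is a minimal round-trip sketch (assuming SQL Server 2016 or newer, since it uses DATEDIFF_BIG) that computes the ticks for a known date and converts them back with the same formula as above:
-- .NET ticks are 100-nanosecond intervals since 0001-01-01 (DateTime.MinValue), i.e. 10 000 000 ticks per second
DECLARE @knownDate datetime2 = '2025-03-18 12:34:56';
DECLARE @ticks BIGINT = DATEDIFF_BIG(SECOND, '0001-01-01', @knownDate) * CAST(10000000 AS BIGINT);
-- Convert back: add the whole minutes first (that count fits in an int), then the remaining seconds
SELECT DATEADD(SECOND,
           CAST(@ticks / 10000000 AS BIGINT) % 60,
           DATEADD(MINUTE,
               CAST(@ticks / 10000000 / 60 AS BIGINT),
               CAST('0001-01-01' AS datetime2)
           )
       ) AS ConvertedBack; -- expected: 2025-03-18 12:34:56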
Labels:
.net,
Database technical,
Entity Framework,
Framelog,
Sql-Server,
T-SQL
Wednesday, 12 March 2025
Rebuild indexes in all tables in Sql Server for a set of databases
Here is a convenient SQL script to rebuild all indexes in a set of databases in SQL Server.
RebuildAllDatabaseIndexesInSetOfDatabases.sql
/*
This script loops through all databases whose names start with a specified prefix or match a specific shared database name.
For each database, it rebuilds indexes on all tables to improve performance by reducing fragmentation.
Variables:
- @SomeAcmeUnitDatabaseNamePrefix: The prefix to match database names against.
- @SomeSpecialSharedDatabaseName: The specific shared database name to include in the loop.
- @DatabaseName: The name of the current database in the loop.
- @TableName: The name of the current table in the loop.
- @SQL: The dynamic SQL statement to execute.
Steps:
1. Declare the necessary variables.
2. Create a cursor to loop through the databases.
3. For each database, use another cursor to loop through all tables and rebuild their indexes.
4. Print progress messages for each database and table.
*/
DECLARE @SomeAcmeUnitDatabaseNamePrefix NVARCHAR(255) = 'SomeAcmeUnit';
DECLARE @SomeSpecialSharedDatabaseName NVARCHAR(255) = 'SomeAcmeShared';
DECLARE @DatabaseName NVARCHAR(255);
DECLARE @TableName NVARCHAR(255);
DECLARE @SQL NVARCHAR(MAX);
DECLARE @DatabaseCount INT;
DECLARE @CurrentDatabaseCount INT = 0;
DECLARE @TableCount INT;
DECLARE @CurrentTableCount INT = 0;
DECLARE @StartTime DATETIME2;
DECLARE @EndTime DATETIME2;
DECLARE @ElapsedTime NVARCHAR(100);
SET @StartTime = SYSDATETIME();
-- Get the total number of databases to process
SELECT @DatabaseCount = COUNT(*)
FROM sys.databases
WHERE [name] LIKE @SomeAcmeUnitDatabaseNamePrefix + '%' OR [name] = @SomeSpecialSharedDatabaseName;
DECLARE DatabaseCursor CURSOR FOR
SELECT [name]
FROM sys.databases
WHERE [name] LIKE @SomeAcmeUnitDatabaseNamePrefix + '%' OR [name] = @SomeSpecialSharedDatabaseName;
OPEN DatabaseCursor;
FETCH NEXT FROM DatabaseCursor INTO @DatabaseName;
WHILE @@FETCH_STATUS = 0
BEGIN
SET @CurrentDatabaseCount = @CurrentDatabaseCount + 1;
-- Prefix the batch with USE so the rebuild targets the database being processed,
-- not whatever database the outer script happens to run in.
SET @SQL = N'USE ' + QUOTENAME(@DatabaseName) + N';
RAISERROR(''*****************************************************************'', 0, 1) WITH NOWAIT;
RAISERROR(''REBUILDING ALL INDEXES OF TABLES INSIDE DB: %s'', 0, 1, @DatabaseName) WITH NOWAIT;
RAISERROR(''*****************************************************************'', 0, 1) WITH NOWAIT;
DECLARE @TableName NVARCHAR(255);
DECLARE @SQL NVARCHAR(MAX);
-- Get the total number of tables to process in the current database
SELECT @TableCount = COUNT(*)
FROM sys.tables;
DECLARE TableCursor CURSOR FOR
SELECT QUOTENAME(SCHEMA_NAME(schema_id)) + ''.'' + QUOTENAME(name)
FROM sys.tables ORDER BY QUOTENAME(name) ASC;
OPEN TableCursor;
FETCH NEXT FROM TableCursor INTO @TableName;
WHILE @@FETCH_STATUS = 0
BEGIN
SET @CurrentTableCount = @CurrentTableCount + 1;
PRINT ''Rebuilding database indexes on table: '' + @TableName + ''... (DB: '' + CAST(@CurrentDatabaseCount AS NVARCHAR) + ''/'' + CAST(@DatabaseCount AS NVARCHAR) + '')'' + ''... (Table: '' + CAST(@CurrentTableCount AS NVARCHAR) + ''/'' + CAST(@TableCount AS NVARCHAR) + '')'';
--RAISERROR(''..Indexing (hit Ctrl+End to go to latest message inside this buffer)'',0,1) WITH NOWAIT;
SET @SQL = ''ALTER INDEX ALL ON '' + @TableName + '' REBUILD'';
EXEC sp_executesql @SQL;
FETCH NEXT FROM TableCursor INTO @TableName;
--PRINT ''Rebuilt database indexes on table: '' + @TableName + ''.'';
END;
CLOSE TableCursor;
DEALLOCATE TableCursor;';
EXEC sp_executesql @SQL,
N'@CurrentDatabaseCount INT, @DatabaseCount INT, @TableCount INT, @CurrentTableCount INT, @DatabaseName NVARCHAR(255)',
@CurrentDatabaseCount, @DatabaseCount, @TableCount, @CurrentTableCount, @DatabaseName;
FETCH NEXT FROM DatabaseCursor INTO @DatabaseName;
END;
SET @EndTime = SYSDATETIME()
-- Calculate the elapsed time
DECLARE @TotalElapsedMilliseconds BIGINT = DATEDIFF(MILLISECOND, @StartTime, @EndTime);
-- Combine whole seconds and the millisecond remainder (avoids the negative values you can get by subtracting DATEPART(MILLISECOND) results)
SET @ElapsedTime = CAST(@TotalElapsedMilliseconds / 1000 AS NVARCHAR) + '.' + RIGHT('000' + CAST(@TotalElapsedMilliseconds % 1000 AS NVARCHAR), 3);
-- Print the elapsed time using RAISERROR
RAISERROR('Total execution time: %s seconds', 0, 1, @ElapsedTime) WITH NOWAIT;
CLOSE DatabaseCursor;
DEALLOCATE DatabaseCursor;
The SQL script uses loops built with T-SQL database cursors and the sys.databases and sys.tables system views to go through all databases matching the configured prefix for 'unit databases', such as 'SomeAcmeUnit_0' and 'SomeAcmeUnit_1', plus the shared database 'SomeAcmeShared'.
This matches a setup with many databases, common in companies, where you just want to rebuild all the indexes in all the tables across the whole set of databases. It is a handy way to be sure that every index has been rebuilt.
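If you want to see how fragmented the indexes are before (or after) running the rebuild, a minimal check like this sketch can be run inside one of the databases; the 30 percent threshold is just an illustrative value:
-- List indexes in the current database with more than 30 percent fragmentation (example threshold)
SELECT DB_NAME() AS DatabaseName,
       OBJECT_SCHEMA_NAME(ips.[object_id]) AS SchemaName,
       OBJECT_NAME(ips.[object_id]) AS TableName,
       i.[name] AS IndexName,
       ips.avg_fragmentation_in_percent
FROM sys.dm_db_index_physical_stats(DB_ID(), NULL, NULL, NULL, 'LIMITED') AS ips
INNER JOIN sys.indexes AS i
    ON i.[object_id] = ips.[object_id] AND i.index_id = ips.index_id
WHERE ips.avg_fragmentation_in_percent > 30
  AND i.[name] IS NOT NULL -- skip heaps, which have no index name
ORDER BY ips.avg_fragmentation_in_percent DESC;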
Labels:
Database admin,
DbAdmin,
Performance,
SQL Server,
T-SQL
Wednesday, 27 March 2024
Importing a JSON file to SQL Server into a variable
A short article today on how to import a JSON file into a variable in SQL Server, which can then be used to insert it into a column of type NVARCHAR(MAX) in a table. The maximum size of NVARCHAR(MAX) is 2 GB, so you can import large JSON files using this data type. If the JSON is small and below 4000 characters, use for example NVARCHAR(4000) instead.
Here is a SQL script that imports the JSON file using OPENROWSET and bulk import. We also pass in the path to the folder where the JSON file is; it is put in the same folder as the .sql script. Note that the variable $(FullScriptDir) is passed in via a .bat file (shown further below) and we expect the .json file to be in the same folder as the .bat file. You can instead provide a full path to a .json file and skip the .bat file, but it is nice to load the .json file from the same folder as the .sql file in case you want to copy the .sql and .json files to another server without having to provide, and possibly adjust, a full path.
SQL script import_json_file_openrowset.sql:
DECLARE @JSONFILE VARCHAR(MAX);
SELECT @JSONFILE = BulkColumn
FROM OPENROWSET (BULK '$(FullScriptDir)\top-posts.json', SINGLE_CLOB) AS j;
PRINT 'JsonFile contents: ' + @JSONFILE
IF (ISJSON(@JSONFILE)=1) PRINT 'It is valid Json';
The .bat file here passes the current folder as a variable to the SQL script.
runsqlscript.bat
@set FullScriptDir=%CD%
sqlcmd -S .\SQLEXPRESS -i import_json_file_openrowset.sql
This outputs:
sqlcmd -S .\SQLEXPRESS -i import_json_file_openrowset.sql
JsonFile contents: [
{
"Id":6107,
"Score":176,
"ViewCount":155988,
"Title":"What are deconvolutional layers?",
"OwnerUserId":8820
},
{
"Id":155,
"Score":164,
"ViewCount":25822,
"Title":"Publicly Available Datasets",
"OwnerUserId":227
}
]
It is valid Json
With the @JSONFILE variable you can then do whatever you want with the content, such as inserting it into a column of a new row in a table.
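As a minimal sketch of that last step (the table ImportedJsonDocuments below is a hypothetical example, not part of the original script), the insert could look like this:
-- Hypothetical staging table for imported JSON documents
CREATE TABLE ImportedJsonDocuments (
    Id INT IDENTITY(1,1) PRIMARY KEY,
    ImportedAtUtc DATETIME2 NOT NULL DEFAULT SYSUTCDATETIME(),
    JsonContent NVARCHAR(MAX) NOT NULL
);
-- Store the file contents that were loaded into @JSONFILE as a new row
INSERT INTO ImportedJsonDocuments (JsonContent)
VALUES (@JSONFILE);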
Importing JSON from a string directly using OPENJSON
It is also possible to import the JSON directly from a string variable like this:
DECLARE @JSONSTRINGSAMPLE VARCHAR(MAX)
SET @JSONSTRINGSAMPLE = N'[
{
"Id": 2334,
"Score": 4.3,
"Title": "Python - Used as scientific tool for graphing"
},
{
"Id": 2335,
"Score": 5.2,
"Title": "C# : Math and physics programming"
}
]';
SELECT * FROM OPENJSON (@JSONSTRINGSAMPLE) WITH (
Id INT,
Score REAL,
Title NVARCHAR(100)
)
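If you want the parsed rows stored in a table instead of just selecting them, a small variation of the same OPENJSON call (using a table variable as an assumed target) could be:
DECLARE @Posts TABLE (Id INT, Score REAL, Title NVARCHAR(100));
-- Parse the JSON array and store each element as a row
INSERT INTO @Posts (Id, Score, Title)
SELECT Id, Score, Title
FROM OPENJSON (@JSONSTRINGSAMPLE) WITH (
    Id INT,
    Score REAL,
    Title NVARCHAR(100)
);
SELECT * FROM @Posts;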

Sunday, 14 January 2024
Generating repeated data into a variable in SQL Server with T-SQL
Let's see how we can create repeated data in a variable in SQL Server using T-SQL.
Use the REPLICATE function to create repeated data like this:
DECLARE @myVariable NVARCHAR(MAX)
SET @myVariable = REPLICATE('.', 10)
PRINT @myVariable
PRINT len(@myVariable)

In case you want to set the variable to data which is longer than 8000 characters, you must convert the argument to NVARCHAR(MAX).
DECLARE @myVariable NVARCHAR(MAX)
SET @myVariable = REPLICATE(CONVERT(NVARCHAR(MAX),'.'), 1024*1024*2)
PRINT len(@myVariable)
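To see why the CONVERT matters, this small check (just an illustrative sketch) shows that without it the result is silently capped at 8000 characters:
DECLARE @truncatedVariable NVARCHAR(MAX)
-- The argument '.' is a plain varchar literal, so REPLICATE caps the result at 8000 bytes
SET @truncatedVariable = REPLICATE('.', 10000)
PRINT len(@truncatedVariable) -- prints 8000, not 10000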
Creating random content is also easy in T-SQL:
DECLARE @myVariable NVARCHAR(MAX)
SET @myVariable = REPLICATE(CONVERT(NVARCHAR(MAX),REPLACE(NEWID(),'-', '')), 4)
PRINT len(@myVariable)
PRINT @myVariable
NEWID() creates a new GUID, and we strip away the '-' character, giving 32 characters which we replicate four times above. Since we were below 8000 characters, we could have skipped converting to NVARCHAR(MAX).
Sunday, 29 September 2019
Deleting a set of databases with similar names with T-SQL
Devops Sunday.
If you end up having many databases in SQL Server and want to get rid of them by matching their names, this T-SQL should help you out.
use master
go

declare @tablestoNuke as table(db nvarchar(100))

insert into @tablestoNuke
select name from sys.databases
where name like '%SomeSimilarDbNameSet%'

declare @nukedb as nvarchar(100)
declare @nukesql as nvarchar(150)

declare nuker cursor for
select db from @tablestoNuke

open nuker
fetch next from nuker into @nukedb

while @@FETCH_STATUS = 0
begin
    set @nukesql = 'drop database ' + @nukedb
    exec sp_executesql @nukesql
    fetch next from nuker into @nukedb
end

close nuker
deallocate nuker

print 'All done nuking'

The T-SQL uses a cursor to loop through the database names fetched from the sys.databases view on the master db, and then uses exec sp_executesql to drop each database.
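If any of the databases have active connections, DROP DATABASE will fail. A variant of the loop body (a sketch, not part of the original script) can force each database into single user mode first:
-- Note: declare @nukesql as nvarchar(max) to make room for the longer statement
set @nukesql = 'alter database ' + QUOTENAME(@nukedb) + ' set single_user with rollback immediate; '
             + 'drop database ' + QUOTENAME(@nukedb)
exec sp_executesql @nukesql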
Tuesday, 30 April 2019
Finding databases with biggest tables in SQL Server
I created a T-SQL script today for SQL Server that lists the tables with the most rows, looping through all databases.
DECLARE @currentDB AS VARCHAR(100)
DECLARE @mrsdb_sqlscript AS NVARCHAR(500)

DECLARE db_cursor CURSOR FOR
SELECT name from sys.databases
where state_desc <> 'offline' and name not in ('master', 'tempdb', 'model', 'msdb')
order by name

DROP TABLE IF EXISTS #largestTables

CREATE TABLE #largestTables (
    DatabaseName VARCHAR(100),
    SchemaName VARCHAR(200),
    TableName VARCHAR(200),
    TotalRowCount INT
)

OPEN db_cursor
FETCH NEXT from db_cursor into @currentDB

WHILE @@FETCH_STATUS = 0
BEGIN
    SET @mrsdb_sqlscript = N'INSERT INTO #largestTables SELECT ''' + @currentDB + ''' , SCHEMA_NAME(schema_id) AS [SchemaName], [Tables].name AS [TableName], SUM([Partitions].[rows]) AS [TotalRowCount] FROM '
        + @currentDB + '.sys.tables AS [Tables] JOIN '
        + @currentDB + '.sys.partitions AS [Partitions] ON [Tables].[object_id] = [Partitions].[object_id] AND [Partitions].index_id IN ( 0, 1 ) GROUP BY SCHEMA_NAME(schema_id), [Tables].name';

    PRINT 'Looking for largest table in the following DB: ' + @currentDB
    exec sp_executesql @mrsdb_sqlscript

    FETCH NEXT FROM db_cursor into @currentDB
END

CLOSE db_cursor
DEALLOCATE db_cursor

SELECT TOP 100 * FROM #largestTables
ORDER BY TotalRowCount DESC, SchemaName ASC

The SQL script above uses a temporary table and a database cursor to loop through all databases, executing a SQL statement that inserts row counts into the temporary table. After the cursor has finished, the result in the temporary table is presented to the DBA. Use it to spot where you have a lot of data in the databases on the database server you are working with!