/****************************************************************/
/* Document     : SQL Server 7/2000 DBA queries and tips        */
/* Doc. Version : 38                                            */
/* File         : sqlserver.txt                                 */
/* Date         : 28-02-2005                                    */
/* Content      : a few handy DBA queries and examples.         */
/*                                                              */
/* Compiled by  : Albert                                        */
/****************************************************************/

----------------------------------------------------------------------
1. SCRIPTS FOR FINDING BLOCKED PROCESSES AND LOCKS IN SQL Server 2000:
----------------------------------------------------------------------

-- 1.1 SHOW ACTIVE PROCESSES IN SQL SERVER
-- If the blocked column is not zero for a process, that process is blocked
-- (blocked holds the spid of the blocking process)

SELECT spid, cpu, physical_io, blocked, cmd, waittime,
substring(convert(varchar(20),last_batch), 1, 20) as "LASTBATCH",
substring(nt_username, 1, 15) AS "USERNAME",
substring(loginame, 1, 20) AS "LOGINNAME",
substring(hostname, 1, 15) AS "HOSTNAME",
substring(program_name, 1, 40) AS "PROGRAM"
FROM master.dbo.sysprocesses
WHERE cmd NOT LIKE 'AWAIT%'

-- 1.2 SHOW THE MOST INTERESTING FIELDS FROM SYSPROCESSES

USE master
SELECT spid, cpu, physical_io, blocked, cmd, waittime,
substring(convert(varchar(20),last_batch), 1, 20) as "LASTBATCH",
substring(nt_username, 1, 15) AS "USERNAME",
substring(loginame, 1, 20) AS "LOGINNAME",
substring(hostname, 1, 15) AS "HOSTNAME",
substring(program_name, 1, 40) AS "PROGRAM"
FROM master.dbo.sysprocesses
-- WHERE cpu>50 AND physical_io>50

-- 1.3 LOCKS IN THE SYSTEM

The Enterprise Manager can show you all locks in the system via a graphical interface.
But you can also use queries launched from the Query Analyzer, such as the following
queries on 'master.dbo.syslockinfo'.

/* Some information about locks in the system                  */
/* This query complements the EM "Current Activity" window     */
/* rsc_type: 1=nothing, 2=database, 3=file, 4=index, 5=table,
   6=page, 7=key, 8=extent, 9=RID, 10=application              */

USE master
SELECT s.spid, l.req_spid, s.cpu, s.physical_io, s.blocked, s.cmd, s.waittime,
substring(convert(varchar(20),s.last_batch), 1, 20) as "LASTBATCH",
substring(s.nt_username, 1, 15) AS "USERNAME",
substring(s.loginame, 1, 20) AS "LOGINNAME",
substring(s.hostname, 1, 15) AS "HOSTNAME",
substring(s.program_name, 1, 40) AS "PROGRAM",
l.req_status AS "Status_of_Lock",
l.rsc_type AS "Resource_type",
l.req_mode AS "Lock_request_mode"
FROM sysprocesses s, syslockinfo l
WHERE s.spid=l.req_spid

SELECT DISTINCT s.spid, l.req_spid, s.cpu, s.physical_io, s.blocked, s.cmd, s.waittime,
substring(convert(varchar(20),s.last_batch), 1, 20) as "LASTBATCH",
substring(s.nt_username, 1, 15) AS "USERNAME",
substring(s.loginame, 1, 20) AS "LOGINNAME",
substring(s.hostname, 1, 15) AS "HOSTNAME",
substring(s.program_name, 1, 40) AS "PROGRAM",
l.req_status AS "Status_of_Lock",
l.rsc_type AS "Resource_type",
l.req_mode AS "Lock_request_mode"
FROM sysprocesses s, syslockinfo l
WHERE s.spid=l.req_spid
AND l.rsc_type in (4,5,6,7,8,9,10)

----------------------------------------------------------------------
2.
Examples of conversion AND stringfunctions: ---------------------------------------------------------------------- Example 1: DATEDIFF: difference of dates ---------------------------------------- Example: -------- USE msdb DECLARE @v1 VARCHAR(30) DECLARE @v2 VARCHAR(30) SELECT @v1=max(backup_finish_date) FROM backupset SELECT @v2=getdate() IF (SELECT DATEDIFF(day, @v1, @v2)) in (0, 1) BEGIN BACKUP LOG sales TO sales_log_dump WITH INIT END Example 2: PATINDEX() string function ------------------------------------- IF you want to find the starting position of WHERE a string starts FROM in a column or expression, you can USE the PATINDEX("pattern", column) function. This example finds the position at which the pattern “wonderful” BEGINs in a specIFic row of the notes column in the titles table. USE pubs GO SELECT PATINDEX('%wonderful%', notes) FROM titles WHERE title_id = 'TC3218' Example 3: The CHARINDEX() string function ------------------------------------------ Returns the starting position of the specIFied expression in a character string. CHARINDEX ( expression1 , expression2 [ , start_location ] ) IF expression1 is not found within expression2, CHARINDEX returns 0. USE PUBS GO SELECT CHARINDEX('wonderful', notes) FROM titles WHERE title_id = 'TC3218' GO IF (charindex('\', @loginame) = 0) BEGIN raiserror(15407, -1, -1, @loginame) return (1) END IF @importpath IS NULL or (charindex('\', @importpath) = 0) BEGIN PRINT 'whatever you wanted to print here.. ' RETURN END Example 4: The REPLACE() string function ---------------------------------------- This is very usefull IF you need to replace a string, or part of a string, in a field in a table: REPLACE(field, 'string_to_be_replaced', 'replacement') SELECT cust_name, city, replace(city,'Den','De') FROM customers To replace funny characters from a field: UPDATE TABLE SET FIELD=REPLACE(FIELD,CHAR(13),'') Control character Value ------- --------------- Tab CHAR(9) Line feed CHAR(10) carriage return CHAR(13) Example 5: convert datetime to char, and/or present it in a smaller format --------------------------------------------------------------------------- Example: -------- declare @x datetime declare @y varchar(20) SELECT @x=GETDATE() -- thus @x is of datatype "datetime" SELECT @x SELECT @y=convert(varchar(10),@x,20) SELECT @y Result: 2004-02-27 17:02:51.390 (this is @x, in DATETIME datatype) 2004-02-27 (this is @y, in VARCHAR(10) datatype) Example: -------- Ofcourse, GETDATE() returns datetime. So if you need to convert datetime to another format: SELECT CONVERT(varchar(30), GETDATE(), 113) -- returns 25 Jan 1999 19:29:44:893 SELECT CONVERT(varchar(30), GETDATE(), 111) -- returns 1999/01/25 SELECT CONVERT(varchar(30), GETDATE(), 102) -- returns 1999.01.25 SELECT CONVERT(varchar(30), GETDATE(), 20) -- returns 1999-04-24 15:43:07 SELECT CONVERT(varchar(10), GETDATE(), 20) -- returns 1999-04-24 Example: -------- DECLARE @myval decimal (5, 2) SET @myval = 193.57 SELECT CONVERT(decimal(10,5), CONVERT(varbinary(20), @myval)) You can have the same conversion with: SELECT CAST(CAST(@myval AS varbinary(20)) AS decimal(10,5)) Example: ------- Suppose ART_PR_IN is a float or an int. 
Converting FROM float or int to money: convert(money, @ART_PR_IN) Converting FROM integer to char: convert(varchar(10),@COUNT_BEFORE) Example: -------- declare @x smalldatetime declare @y varchar(20) declare @z smalldatetime SELECT @x=(select in_date from Orders where order_nr='AC95') -- thus @x is of datatype "smalldatetime" SELECT @x SELECT @y=convert(varchar(10),@x,20) SELECT @y SELECT @z=convert(smalldatetime,@y) SELECT @z Output: 1998-06-23 00:00:00 (1 row(s) affected) 1998-06-23 (1 row(s) affected) 1998-06-23 00:00:00 (1 row(s) affected) Example: -------- SELECT @x=CONVERT(float,LTRIM(RTRIM(SUBSTRING(EENH_PRG_VRL,2,LEN(EENH_PRG_VRL)-2)))) SELECT @y=CONVERT(smalldatetime,LTRIM(RTRIM(SUBSTRING(EENH_STER_DT,2,LEN(EENH_STER_DT)-2)))) Example: -------- USE pubs SELECT 'The order date is ' + CAST(ord_date AS varchar(30)) FROM sales WHERE ord_num = 'A2976' ORDER BY ord_num Example 6: function ISNULL, Replace null value ---------------------------------------------- The function ISNULL: Replaces NULL with the specIFied replacement value. Syntax ISNULL ( check_expression , replacement_value ) USE pubs GO SELECT AVG(ISNULL(price, $10.00)) FROM titles GO So, if a value in the price column is null, the IsNull function substitutes the value of $10.00. Example 7: function ISDATE, Check date -------------------------------------- The function ISDATE: Determines whether an input expression is a valid date. Syntax: ISDATE ( expression ) DECLARE @datestring varchar(8) SET @datestring = '12/21/98' SELECT ISDATE(@datestring) This will return 1 IF ISDATE(@BEGINdatum) <> 1 or ISDATE(@einddatum) <> 1 BEGIN print 'invalid date, please use the format YYYY-MM-DD ' goto error_section END Example 8: function LTRIM, Removing leading spaces -------------------------------------------------- The function LTRIM: Returns a character expression after removing leading blanks. Syntax: LTRIM ( character_expression ) DECLARE @string_to_trim varchar(60) SET @string_to_trim = ' Five spaces are at the BEGINning of this string.' SELECT 'Here is the string without the leading spaces: ' + LTRIM(@string_to_trim) Example 9: LEFT function, used to return 'n' leftmost characters ----------------------------------------------------------------- Syntax: LEFT(character_expression,n) SELECT LEFT(title, 5) FROM titles ORDER BY title_id GO Here is the example result set: ----- The B Cooki You C Example 10: RIGHT function, used to return 'n' rightmost characters ------------------------------------------------------------------- Syntax: RIGHT(character_expression,n) SELECT RIGHT(au_fname, 5) FROM authors ORDER BY au_fname GO Here is the result set: ------------------ raham Akiko lbert Ann Anne Example 11: function LEN, number of characters ---------------------------------------------- The function LEN: Returns the number of characters, rather than the number of bytes, of the given string expression, excluding trailing blanks. Syntax LEN ( string_expression ) USE Northwind GO SELECT LEN(CompanyName) AS 'Length', CompanyName FROM Customers WHERE Country = 'FinlAND' Example 12: remove '' FROM fields: ---------------------------------- Suppose you have a table like id name city '1' 'Harry' 'Boston' '2' 'Miriam' 'Seattle' etc.. WHERE all fields are enclosed by ''. To create a new table without these quotes, you can use the following example. 
DECLARE @id varchar(15) DECLARE @name varchar(15) DECLARE @city varchar(15) DECLARE @length_id INT DECLARE @length_name INT DECLARE @length_city INT DECLARE @id2 varchar(15) DECLARE @name2 varchar(15) DECLARE @city2 varchar(15) DECLARE cur1 CURSOR FOR SELECT id, name, city FROM tab1 OPEN cur1 FETCH NEXT FROM cur1 INTO @id, @name, @city WHILE (@@fetch_status<>-1) BEGIN SELECT @length_id =LEN(@id) SELECT @length_name =LEN(@name) SELECT @length_city =LEN(@city) SELECT @id2 =substring(@id,2,@length_id-2) SELECT @name2 =substring(@name,2,@length_name-2) SELECT @city2 =substring(@city,2,@length_city-2) INSERT INTO tab2 values (@id2,@name2,@city2) FETCH NEXT FROM cur1 INTO @id, @name, @city END CLOSE cur1 DEALLOCATE cur1 Example 13: How to use ' in char or varchar: -------------------------------------------- DECLARE @VAR1 VARCHAR(64) SET @VAR1=' Appie''s ' SELECT @var1 Result: Appie's Example 14: Find datatypes of table columns: -------------------------------------------- SELECT substring(c.name, 1, 30) as "ColumName", c.xtype, substring(object_name(c.id),1,30) as "TableName", substring(t.name,1,30) as "DataType" FROM syscolumns c, systypes t WHERE c.xtype=t.xtype AND object_name(c.id)='Orders' -- fill in the tablename Example 15: Find names of table columns like '%x%: -------------------------------------------------- SELECT substring(c.name, 1, 30) as "ColumName", c.xtype, substring(object_name(c.id),1,30) as "TableName", substring(t.name,1,30) as "DataType" FROM syscolumns c, systypes t WHERE c.xtype=t.xtype AND c.name like '%lock%' -- fill in the fieldname AND object_name(c.id) not like 'stp_%' For SQL2005, you can also use: select object_id, object_name(object_id) as object_name,name from master.dbo.sys.all_columns where name like '%lock%' Example 16: Remove characters FROM string or field: --------------------------------------------------- 16.1 ThE following sample code can be used to remove or save any range of characters from a string or field. declare @s varchar(100), @i int SELECT @s = 'asd i/.,<>as>[{}]vnbv' SELECT @s SELECT @i = patindex('%[^a-z^A-Z^0-9^ ]%', @s) while @i > 0 BEGIN SELECT @s = replace(@s, substring(@s, @i, 1), '') SELECT @i = patindex('%[^a-z^A-Z^0-9^ ]%', @s) END SELECT @s gives before asd i/.,<>as>[{}]vnbv after asd iasvnbv 16.2 Removing the characters FROM a field in a table create table #a (s varchar(100)) INSERT #a (s) SELECT 'asd i/.,<>as>[{}]vnbv' INSERT #a (s) SELECT 'aaa' INSERT #a (s) SELECT '123 ''h 9)' SELECT * FROM #a while @@rowcount > 0 update #a set s = replace(s, substring(s, patindex('%[^a-z^A-Z^0-9^ ]%', s), 1), '') WHERE patindex('%[^a-z^A-Z^0-9^ ]%', s) <> 0 SELECT * FROM #a Gives before asd i/.,<>as>[{}]vnbv aaa 123 'h 9) after asd iasvnbv aaa 123 h 9 16.3 Removing the characters FROM a field in a table Suppose you only want to have the numbers from a certain field, which contains all kind of characters like % * and letters. create table tab1 ( id int, badfield varchar(30) ) create table tab2 ( id int, goodfield varchar(30) ) drop table tab1 drop table tab2 insert into tab1 values (1, 'joepie1057 PC%$w') insert into tab1 values (2, '#*mooiweer2234 *&E') select * from tab1 select * from tab2 code to insert into tab2 the cleaned fields. 
declare @badfield varchar(100)
declare @goodfield varchar(100)
declare @id int
declare @i int

DECLARE cur1 CURSOR FOR
SELECT id, badfield FROM tab1

open cur1
FETCH NEXT FROM cur1 INTO @id, @badfield
WHILE (@@fetch_status<>-1)
BEGIN
  SELECT @i = patindex('%[^0-9^]%', @badfield)
  while @i > 0
  BEGIN
    SELECT @badfield = replace(@badfield, substring(@badfield, @i, 1), '')
    SELECT @i = patindex('%[^0-9^]%', @badfield)
  END
  SELECT @goodfield=RTRIM(LTRIM(@badfield))
  insert into tab2 values (@id,@goodfield)
  FETCH NEXT FROM cur1 INTO @id, @badfield
END
CLOSE cur1
DEALLOCATE cur1

Example 17: use of the logical NOT operator:
--------------------------------------------

Suppose we have some sort of inventory table:

CREATE TABLE INVENTORY_TABLE
(
DEVICE_ID INT,
NAME VARCHAR(64)
)

Now put some sample records in this table:

insert into INVENTORY_TABLE values (1, 'NL_AMSTERDAM_LAP_07')
insert into INVENTORY_TABLE values (2, 'PLUTO')
insert into INVENTORY_TABLE values (3, 'NL_ALKMAAR-LAP_21')
insert into INVENTORY_TABLE values (4, 'STARBOSS')
insert into INVENTORY_TABLE values (5, 'US_NY_DSK007')

select * from INVENTORY_TABLE

DEVICE_ID   NAME
----------- -------------------
1           NL_AMSTERDAM_LAP_07
2           PLUTO
3           NL_ALKMAAR-LAP_21
4           STARBOSS
5           US_NY_DSK007

(5 row(s) affected)

Now try this:

SELECT COUNT(*) FROM INVENTORY_TABLE
WHERE NOT (NAME LIKE '%LAP%' OR NAME LIKE '%DSK%')

Result=2, which is what we want.

Or try this:

SELECT COUNT(*) FROM INVENTORY_TABLE
WHERE NAME NOT LIKE '%LAP%' AND NAME NOT LIKE '%DSK%'

Result=2, so this is also OK.

Now try this:

SELECT COUNT(*) FROM INVENTORY_TABLE
WHERE NAME NOT LIKE '%LAP%' OR NAME NOT LIKE '%DSK%'

Result=5, which is probably NOT the result we wanted!

----------------------------------------------------------------------
3. EXAMPLES OF JOINS AND SUMMARIZING DATA:
----------------------------------------------------------------------

3.1. Create three sample tables:
--------------------------------

In order to demonstrate the joins and summarizations in the next sections,
let us first create some example tables.

create table LOC
(
LOCID int,
CITY varchar(16),
constraint pk_loc primary key (locid)
)

create table DEPT
(
DEPID int,
DEPTNAME varchar(16),
LOCID int,
constraint pk_dept primary key (depid),
constraint fk_dept_loc foreign key (locid) references loc(locid)
)

create table EMP
(
EMPID int,
EMPNAME varchar(16),
DEPID int,
SAL int,
constraint pk_emp primary key (empid),
constraint fk_emp_dept foreign key (depid) references dept(depid)
)

-- ---------------------------------------------------------------------

3.2. Now insert some sample records:
------------------------------------

INSERT INTO LOC VALUES (1,'Amsterdam');
INSERT INTO LOC VALUES (2,'Haarlem');
INSERT INTO LOC VALUES (3,null);
INSERT INTO LOC VALUES (4,'Utrecht');

INSERT INTO DEPT VALUES (1,'Sales',1);
INSERT INTO DEPT VALUES (2,'PZ',1);
INSERT INTO DEPT VALUES (3,'Management',2);
INSERT INTO DEPT VALUES (4,'RD',3);
INSERT INTO DEPT VALUES (5,'IT',4);

INSERT INTO EMP VALUES (1,'Joop',1,1000);
INSERT INTO EMP VALUES (2,'Gerrit',2,500);
INSERT INTO EMP VALUES (3,'Harry',2,2000);
INSERT INTO EMP VALUES (4,'Christa',3,900);
INSERT INTO EMP VALUES (5,null,4,3000);
INSERT INTO EMP VALUES (6,'Nina',5,5000);
INSERT INTO EMP VALUES (7,'Nadia',5,4000);

-- ----------------------------------------------------------------------
3.3. Show what's in these tables:
---------------------------------

SELECT * FROM emp
SELECT * FROM dept
SELECT * FROM loc

empid       empname          depid
----------- ---------------- -----------
1           Joop             1
2           Gerrit           2
3           Harry            2
4           Christa          3
5           NULL             4
6           Nina             5
7           Nadia            5

(7 row(s) affected)

depid       deptname         locid
----------- ---------------- -----------
1           Sales            1
2           PZ               1
3           Management       2
4           RD               3
5           IT               4

(5 row(s) affected)

locid       city
----------- ----------------
1           Amsterdam
2           Haarlem
3           NULL
4           Utrecht

(4 row(s) affected)

-- ----------------------------------------------------------------------

3.4. Let's try some join statements:
------------------------------------

Query 1:
--------

SELECT deptname, city
FROM dept, loc
WHERE dept.locid=loc.locid

SELECT deptname, city
FROM dept INNER JOIN loc
ON dept.locid=loc.locid

Result 1:
---------

deptname         city
---------------- ----------------
Sales            Amsterdam
PZ               Amsterdam
Management       Haarlem
RD               NULL
IT               Utrecht

Query 2:
--------

SELECT e.empid, e.empname, d.depid, d.deptname
FROM emp e, dept d
WHERE e.depid=d.depid

SELECT e.empid, e.empname, d.depid, d.deptname
FROM emp e INNER JOIN dept d
ON e.depid=d.depid

Result 2:
---------

empid       empname          depid       deptname
----------- ---------------- ----------- ----------------
1           Joop             1           Sales
2           Gerrit           2           PZ
3           Harry            2           PZ
4           Christa          3           Management
5           NULL             4           RD
6           Nina             5           IT
7           Nadia            5           IT

So Nina and Nadia are both in the IT department.

Query 3:
--------

SELECT e.empid, e.empname, d.depid, d.deptname, l.locid, l.city
FROM emp e INNER JOIN dept d ON e.depid=d.depid
INNER JOIN loc l ON d.locid=l.locid

Result 3:
---------

empid       empname          depid       deptname         locid       city
----------- ---------------- ----------- ---------------- ----------- ----------------
1           Joop             1           Sales            1           Amsterdam
2           Gerrit           2           PZ               1           Amsterdam
3           Harry            2           PZ               1           Amsterdam
4           Christa          3           Management       2           Haarlem
5           NULL             4           RD               3           NULL
6           Nina             5           IT               4           Utrecht
7           Nadia            5           IT               4           Utrecht

So both Nina and Nadia are in the IT department in Utrecht.

Query 4:
--------

SELECT e.empid, e.empname, d.depid, d.deptname, l.locid, l.city
FROM emp e LEFT JOIN dept d ON e.depid=d.depid
LEFT JOIN loc l ON d.locid=l.locid

Result 4:
---------

empid       empname          depid       deptname         locid       city
----------- ---------------- ----------- ---------------- ----------- ----------------
1           Joop             1           Sales            1           Amsterdam
2           Gerrit           2           PZ               1           Amsterdam
3           Harry            2           PZ               1           Amsterdam
4           Christa          3           Management       2           Haarlem
5           NULL             4           RD               3           NULL
6           Nina             5           IT               4           Utrecht
7           Nadia            5           IT               4           Utrecht

In this case, there is no difference between the INNER JOINs in Query 3 and the
LEFT JOINs in Query 4. This is because every emp row has a matching dept row, and
every dept row has a matching loc row, so the inner join does not filter anything out.
In general the results can differ.
In general, use a LEFT JOIN to see all rows from the "left" table, even those that
have no matching row (or a NULL value) in the common key.
In general, use a RIGHT JOIN to see all rows from the "right" table, even those that
have no matching row (or a NULL value) in the common key.
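To actually see a difference with these sample tables, you could add a department
that has no employees yet. A minimal sketch (the 'Finance' department is made up
for this illustration):

INSERT INTO DEPT VALUES (6,'Finance',2);

SELECT d.depid, d.deptname, e.empid, e.empname
FROM dept d LEFT JOIN emp e
ON d.depid=e.depid

-- The LEFT JOIN returns the Finance row with NULL in empid and empname,
-- whereas an INNER JOIN on the same tables would not return it at all.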
-- -------------------------------------------------------------------- 3.5 Use of ROLLUP and CUBE: --------------------------- Suppose we have the following simple table: create table emp2 ( name varchar(10), city varchar(10), salary decimal(7,2) ) Let's put some example values into emp2: insert into emp2 values ('joop','amsterdam',2000.00) insert into emp2 values ('klaas','Haarlem',3000.00) insert into emp2 values ('marie','amsterdam',1000.00) insert into emp2 values ('nadia','alkmaar',4000.00) insert into emp2 values ('miranda','alkmaar',1500.00) insert into emp2 values ('maarten','haarlem',7000.00) insert into emp2 values ('nina','haarlem',6000.00) Now show all records in emp2: SELECT * FROM emp2 name city salary ---------- ---------- --------- joop amsterdam 2000.00 klaas Haarlem 3000.00 marie amsterdam 1000.00 nadia alkmaar 4000.00 miranda alkmaar 1500.00 maarten haarlem 7000.00 nina haarlem 6000.00 EXAMPLE QUERY WITH A "GROUP BY" CLAUSE: --------------------------------------- Let's try a "GROUP BY": SELECT city, sum(salary) FROM emp2 GROUP BY city city ---------- --------- alkmaar 5500.00 amsterdam 3000.00 Haarlem 16000.00 Thus use the GROUP BY in conjunction with an aggegate function like SUM(), AVG() etc.. Also, you MUST mention in the GROUP BY list, the other fields not used in the function. EXAMPLE QUERY WITH A ROLLUP CLAUSE: ----------------------------------- The ROLLUP operator is useful in generating reports that contain subtotals and totals. SELECT city, sum(salary) FROM emp2 GROUP BY city WITH ROLLUP Result: city ---------- --------- alkmaar 5500.00 amsterdam 3000.00 Haarlem 16000.00 NULL 24500.00 SELECT name, city, sum(salary) FROM emp2 GROUP BY city, name WITH ROLLUP Result: name city ---------- ---------- --------- miranda alkmaar 1500.00 nadia alkmaar 4000.00 NULL alkmaar 5500.00 joop amsterdam 2000.00 marie amsterdam 1000.00 NULL amsterdam 3000.00 klaas Haarlem 3000.00 maarten haarlem 7000.00 nina haarlem 6000.00 NULL Haarlem 16000.00 NULL NULL 24500.00 COMPUTE BY example: ------------------- An alternative for ROLLUP is the COMPUTE and COMPUTE BY clause. SELECT name, salary FROM emp2 ORDER BY name compute sum(salary) Result: name salary ---------- --------- joop 2000.00 klaas 3000.00 maarten 7000.00 marie 1000.00 miranda 1500.00 nadia 4000.00 nina 6000.00 sum ======== 24500.00 3.6. 
An example of a very simple Relation Database: --------------------------------------------------- (PK): Primary Key; (FK): Foreign Key -------------------- |TABLE CUSTOMERS: | |------------------| ----------|Cust_ID (PK) | | |Cust_name | | |Address | | |Postal+code | | |City | | |Country | | -------------------- | | | ----------------- -------------------- | 1:n |TABLE ORDERS | |TABLE ORDERDETAIL | | |----------------| 1:n |------------------| | |Order_id (PK) |-------<<--|Order_id (PK/FK) | |---<<----|Cust_id | |Product_id (PK/FK |---<>---- |Order_date | |Quantity | | |Emp_id |-->>-- |Discount | | ------------------ | | | | | -------------------- | 1:n | 1:1 | | | ------------------ | -------------------- | |TABLE EMPLOYEES | | |TABLE PRODUCTS | | |----------------| | |------------------| | |Emp_id (PK) |--------| |Product_id |--------- |Name | |Product_name | |Lastname | |No_In_Stock | ------------------ |To_Order | |Price | -------------------- Here is the DDL: -- this is comment -- dbo is database owner, but could be other owner or schema USE SALES -- or other database of your choice GO CREATE TABLE [dbo].[Customers] ( Cust_id int NOT NULL, Cust_name varchar(20) NOT NULL, Address varchar(30), City varchar(20), Country varchar(20), CONSTRAINT pk_cust PRIMARY KEY (cust_id) ) ON DATA -- The data filegroup of the sales database GO CREATE TABLE [dbo].[Employees] ( Emp_id int NOT NULL, Name varchar(20) NOT NULL, LastName varchar(30) NOT NULL, CONSTRAINT pk_emp PRIMARY KEY (emp_id) ) ON DATA -- The data filegroup of the sales database GO CREATE TABLE [dbo].[Products] ( Product_id int NOT NULL, Product_Name varchar(20) NOT NULL, Unit_price decimal(7,2) NOT NULL, No_In_Stock int NOT NULL, To_Order char(1) NOT NULL, -- boolean field: y or n CONSTRAINT pk_product PRIMARY KEY (product_id) ) ON DATA -- The data filegroup of the sales database GO CREATE TABLE [dbo].[Orders] ( Order_id int IDENTITY(1,1) NOT NULL, Cust_id int NOT NULL, Emp_id int NOT NULL, Order_date datetime NOT NULL default getdate(), CONSTRAINT pk_order PRIMARY KEY (order_id), CONSTRAINT fk_cust_id FOREIGN KEY (cust_id) REFERENCES dbo.customers (cust_id), CONSTRAINT fk_emp_id FOREIGN KEY (emp_id) REFERENCES dbo.employees (emp_id) ) ON DATA -- The data filegroup of the sales database GO CREATE TABLE [dbo].[OrderDetail] ( Order_id int NOT NULL, Product_id int NOT NULL, Quantity int NOT NULL, CONSTRAINT pk_detail PRIMARY KEY (order_id,product_id), CONSTRAINT fk_product FOREIGN KEY (product_id) REFERENCES dbo.products (product_id), CONSTRAINT fk_order FOREIGN KEY (order_id) REFERENCES dbo.orders (order_id) ) ON DATA -- The data filegroup of the sales database GO ---------------------------------------------------------------------- 4. Database settable options: sp_dboption ---------------------------------------------------------------------- To show the properties of your databases (like for example, 'Truncate log on checkpoint' etc..) you can USE the stored procedure 'sp_dboption'. You can also alter any property by this stored procedure. 
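You can also switch an option on or off from the Query Analyzer. A minimal sketch
('trunc. log on chkpt.' and 'autoshrink' are two of the option names that sp_dboption
accepts in SQL Server 7/2000):

exec sp_dboption 'pubs', 'trunc. log on chkpt.', 'true'
exec sp_dboption 'pubs', 'autoshrink', 'false'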
Example: to show the properties for the PUBS database only, you can execute
the following in the Query Analyzer:

exec sp_dboption pubs

If you want to document the properties for all databases, you can use the following script:

-- BEGIN SCRIPT
-- You should probably set the "Results in text"
-- option on in the Query Analyzer

DECLARE @dbname VARCHAR(30)

DECLARE cur1 CURSOR FOR
SELECT name FROM master.dbo.sysdatabases

OPEN cur1
FETCH NEXT FROM cur1 INTO @dbname
WHILE (@@fetch_status<>-1)
BEGIN
  PRINT 'DATABASE OPTION SET FOR: ' +@dbname
  PRINT ' '
  EXEC ('sp_dboption '+@dbname)
  FETCH NEXT FROM cur1 INTO @dbname
END
CLOSE cur1
DEALLOCATE cur1
-- END OF SCRIPT

----------------------------------------------------------------------
5. SCRIPTS FOR DOCUMENTING PHYSICAL FILES, FILEGROUPS, AND THEIR PROPERTIES
----------------------------------------------------------------------

5.1 Document the files and filegroups of a database
---------------------------------------------------

You are probably interested in the physical files of your databases and their
properties, like filenames, paths, size etc..
Every database has the system table 'sysfiles', which registers all important data about files.
So when you're in a database, you can query sysfiles in the following way:

SELECT fileid, size, (size * 8 /1024) AS SIZE_IN_MB,
substring(name, 1, 30) AS NAME,
substring(filename, 1, 50) AS FILENAME
FROM sysfiles

The following query might also be useful. Every database has the system table 'sysfiles',
but supplementary information can also be retrieved from the system table 'sysfilegroups'.
So we can join both tables, as follows:

SELECT sysfiles.fileid, sysfiles.groupid, sysfiles.size,
(sysfiles.size * 8 / 1024) AS "SIZE_IN_MB",
substring(sysfiles.name, 1, 15) AS NAME,
substring(sysfiles.filename, 1, 30) AS FILENAME,
substring(sysfilegroups.groupname, 1, 40) AS GROUPNAME
FROM sysfiles, sysfilegroups
WHERE sysfiles.groupid=sysfilegroups.groupid

If you want to show the file properties of all databases, you can use the following script:

-- BEGIN SCRIPT
-- You should probably set the "Results in text"
-- option on in the Query Analyzer

DECLARE @dbname VARCHAR(30)

DECLARE cur1 CURSOR FOR
SELECT name FROM master.dbo.sysdatabases

OPEN cur1
FETCH NEXT FROM cur1 INTO @dbname
WHILE (@@fetch_status<>-1)
BEGIN
  PRINT 'DATABASE FILES FOR: ' +@dbname
  PRINT ' '
  EXEC ('SELECT fileid, (size * 8 /1024) AS SIZE_IN_MB, substring(name, 1, 20) AS NAME, substring(filename, 1, 35) AS FILENAME FROM '+@dbname+'.dbo.sysfiles')
  FETCH NEXT FROM cur1 INTO @dbname
END
CLOSE cur1
DEALLOCATE cur1
-- END OF SCRIPT

5.2 Document the "devices" in SQL Server 2000
---------------------------------------------

Although databases consist of physical files in the operating system, the concept of a
"device" still exists in SQL Server 7 & 2000, but no longer as the building block of a
database, as was the case in SQL Server 6.x.
In SQL Server 7 & 2000 you can create, for example, "backup dump devices", which are of
course OS files somewhere on the filesystem, but which are registered in SQL Server as
devices in master.dbo.sysdevices.
These files have nothing to do with database files, except that they may contain
backups of normal databases.
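For example, you could register a disk dump device and write a backup to it; such a
device then shows up in sysdevices. A minimal sketch (the device name and path are
made up for this illustration):

-- register a disk backup device
EXEC sp_addumpdevice 'disk', 'sales_dump', 'E:\backup\sales_dump.bak'

-- back up a database to that device
BACKUP DATABASE sales TO sales_dump WITH INIT

-- and drop the device registration again when it is no longer needed:
-- EXEC sp_dropdevice 'sales_dump'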
You want to document the information in master.dbo.sysdevices SELECT substring(name, 1, 30) AS NAME, size, status, cntrltype, substring(phyname, 1, 50) AS FILE_NAME FROM master.dbo.sysdevices WHERE cntrltype=2 AND status=16 Name : logical name of the device Phyname : path AND physical name of the file Status : 16 = Backup file Cntrltype : 2= Disk backup file There are several cntrltypes, such as 2 which is a disk backup file but a registered device could also be a tapedevice. ---------------------------------------------------------------------- 6. SCRIPTS FOR DOCUMENTING ALL YOUR LOGINS, USERS, ROLES, AND ROLEMEMBERS ---------------------------------------------------------------------- -- 6.1 SHOW ALL LOGINS (NT Authentication, SQL Server Mixed) SELECT substring(loginname, 1, 30) as LOGINNAME, isntname, isntgroup, substring(dbname, 1, 30) as DEFAULT_DB, createdate FROM master.dbo.syslogins -- 6.2 SHOW USERS OF A DATABASE SELECT l.suid, substring(l.name, 1, 20) as "LOGINNAME", u.uid, substring(u.name,1,20) as "DBNAME", u.suid, u.isntuser, u.issqluser FROM master.dbo.syslogins l, sysusers u WHERE l.suid=u.suid ORDER BY u.name SELECT l.sid, substring(l.name, 1, 20) as "LOGINNAME", u.uid, substring(u.name,1,20) as "DBNAME", u.sid, u.isntuser, u.issqluser FROM master.dbo.syslogins l, sysusers u WHERE l.sid=u.sid ORDER BY u.name -- 6.3 SHOW ALL USERS OF ALL DATABASES -- BEGIN SCRIPT -- You should probably set the "Results in text" -- option on in the Query Analyzer DECLARE @db_name VARCHAR(30) DECLARE cur1 CURSOR FOR SELECT name FROM master.dbo.sysdatabases OPEN cur1 FETCH NEXT FROM cur1 INTO @db_name WHILE (@@fetch_status<>-1) BEGIN PRINT 'DATABASE USERS PLUS ROLES FOR DATABASE: '+@db_name EXEC ('SELECT substring(name, 1, 20) AS NAME, uid AS USER_ID FROM '+@db_name+'.dbo.sysUSErs') FETCH NEXT FROM cur1 INTO @db_name END CLOSE cur1 DEALLOCATE cur1 -- END OF SCRIPT -- 6.4 SHOW ALL ROLEMEMBERS OF ALL ROLES OF ALL DATABASES -- BEGIN SCRIPT -- You should probably set the "Results in text" -- option on in the Query Analyzer DECLARE @dbname VARCHAR(30) DECLARE cur1 CURSOR FOR SELECT name FROM master.dbo.sysdatabases OPEN cur1 FETCH NEXT FROM cur1 INTO @dbname WHILE (@@fetch_status<>-1) BEGIN PRINT 'DATABASE ROLES AND MEMBERS FOR: ' +@dbname PRINT ' ' EXEC('SELECT DbRole = substring(g.name, 1, 30), MemberName = substring(u.name, 1, 30) FROM '+@dbname+'.dbo.sysUSErs'+' u'+',' +@dbname+'.dbo.sysUSErs'+' g'+',' +@dbname+'.dbo.sysmembers'+' m ' +'WHERE g.uid = m.groupuid AND g.issqlrole = 1 AND u.uid = m.memberuid ORDER BY dbrole') FETCH NEXT FROM cur1 INTO @dbname END CLOSE cur1 DEALLOCATE cur1 -- END OF SCRIPT ---------------------------------------------------------------------- 7. Change the owner of a set of objects in a database: ---------------------------------------------------------------------- 7.1 Change the owner of one object, like a table: ------------------------------------------------- Suppose charlie is the owner of the table 'orders' in the database 'sales'. Suppose that the ownership of orders must be changed to the new owner harry. You can USE the system stored procedure 'sp_changeobjectowner' in order to change the ownership of a object. 
In our example, you should USE a commAND similar to the following: USE sales exec sp_changeobjectowner 'db1.charlie.orders', 'harry' 7.2 Script for changing the owner of a set of objects, in one run: ------------------------------------------------------------------ Sometimes it might be needed to change the ownership for a large set of objects, in one automated run. Suppose in database DB1, the USEr charlie is the current owner of a large set of tables. Suppose further that the USEr john should be the owner of this set of tables. So, how can we automate the change of ownership? The following script will serve as an example of how to change the ownership for a set of tables. Run this script FROM the Query Analyzer. First change to the database WHERE the owners must be changed. Next, set the variables @old_owner AND @new_owner accordingly. -- BEGIN OF SCRIPT -- first allow updates to the system tables; the default is false exec sp_configure 'allow updates', 1 reconfigure with override go -- declaring some variables DECLARE @old_owner varchar(50) DECLARE @new_owner varchar(50) DECLARE @oldownerid int DECLARE @newownerid int DECLARE @id int DECLARE @tabname varchar(50) -- Here is WHERE you set the OLD_OWNER AND NEW_OWNER: SELECT @old_owner='charlie' SELECT @new_owner='john' SELECT @oldownerid=(SELECT uid FROM sysUSErs WHERE name=@old_owner) SELECT @newownerid=(SELECT uid FROM sysUSErs WHERE name=@new_owner) DECLARE cur1 cursor FOR SELECT name FROM sysobjects WHERE type='U' AND uid=@oldownerid OPEN cur1 FETCH NEXT FROM cur1 INTO @tabname WHILE (@@fetch_status<>-1) BEGIN UPDATE sysobjects SET uid=@newownerid WHERE name=@tabname FETCH NEXT FROM cur1 INTO @tabname END -- remove cursor FROM memory CLOSE cur1 DEALLOCATE cur1 -- setting allowing updates to system tables back to false exec sp_configure 'allow updates', 0 reconfigure with override go -- END OF SCRIPT 7.3 Change the owner of a database: ----------------------------------- Most of the time, your production databases will be owned (created) by the system administrator, or the NT/2000 Administrator (who is mapped to the sysadmins serverrole). But suppose the USEr harry 'owns' the database 'db1' (probably harry is member of the serverrole 'database creators', or has inherited the permission in another way). IF you want to change the owner to another USEr, you can USE the system stored procedure 'sp_changedbowner' You can do this in a way similar to the following example: First make sure the new owner is not already a database USEr of our example database db1. Secondly, go to the database db1, using the Query Analyzer, AND execute the commAND: sp_changedbowner ‘new_owner’ 7.4 Renaming a database: ------------------------ Suppose you want to change the name of a database, say 'dev1' into 'dev2'. It's best to change the database in single USEr mode first. Now you can change the database name using the Query Analyzer, by executing the commAND sp_renamedb ‘dev1’, ‘dev2’ ---------------------------------------------------------------------- 8. SCRIPTS FOR RETRIEVING PROPERTIES OF INDEXES AND PK-FK CONSTRAINTS: ---------------------------------------------------------------------- --------------------------------------------------------- In this section, we might demonstrate some object properties. So, for illustration purposes, consider the following: suppose you have a database called 'db1', with the database USEr 'charlie', who has enough permissions to create tables AND indexes. 
Suppose further that charlie logs on, AND creates the following tables create table customers ( custid int not null, custname varchar(10), CONSTRAINT pk_cust PRIMARY KEY (custid) ) create table contacts ( contactid int not null, custid int, contactname varchar(10), CONSTRAINT pk_contactid PRIMARY KEY (contactid), CONSTRAINT fk_cust FOREIGN KEY (custid) REFERENCES customers(custid) ) -------------------------------------------------------------------------- 8.1 Overview Foreign Keys AND Referring- AND Referenced tables Query on sysreferences ------------------------------------------------------------------------------------- Many tables in a database will be "linked" by Primary Key AND Foreign Key relationships. Maybe you think it's hANDy to get a list of all Foreign Keys AND the associated referring tables (with the FK), pointing to the referred tables (with the PK). Every USEr database contains the system table 'sysreferences', on which we can USE the following query: SELECT substring(name, 1, 60) as "ForeignKey", substring(object_name(parent_obj), 1, 40) as "TableWithFK" FROM sysobjects o, sysreferences r WHERE o.type='F' AND o.name=object_name(r.constid) SELECT substring(object_name(constid), 1, 40) AS FK, substring(object_name(fkeyid), 1, 40) AS "Referencing Table", substring(object_name(rkeyid), 1, 40) AS "Referenced Table" FROM sysreferences ORDER BY object_name(rkeyid) These queries shows all FK's in your database, plus the associated PK-FK linked tables. So, IF you would USE this query in database db1, you would see the following resultset: FK Referencing Table Referenced Table ------------- ---------------- --------------- fk_cust contacts customers 8.2 To see all Tables with a PK: -------------------------------- SELECT substring(name,1,30) AS "PrimaryKey", id, xtype, object_name(parent_obj) AS "Parent_table" FROM sysobjects WHERE xtype='PK' ORDER BY object_name(parent_obj) 8.3 example of adding AND dropping pk: -------------------------------------- ALTER TABLE ASSET_SMS_EXT DROP CONSTRAINT ASSET_PK ALTER TABLE ASSET_SMS_EXT ADD CONSTRAINT ASSET_PK PRIMARY KEY (DWMACHINEID) 8.4 To DISABLE Foreign Keys in tables that point to a PK in another table: -------------------------------------------------------------------------- With the following query, you can delete one or more rows FROM the table with the PK, without error messages that other tables point with a FK to that PK. This is so because you have DISABLED te FK constraint. You can also enable the FK again after you have finished. DECLARE @FK VARCHAR(128) DECLARE @REFERENCED VARCHAR(128) DECLARE cur1 CURSOR FOR SELECT name, object_name(parent_obj) FROM sysobjects o, sysreferences r WHERE o.type='F' AND o.name=object_name(r.constid) OPEN cur1 FETCH NEXT FROM cur1 INTO @FK, @REFERENCED WHILE (@@fetch_status<>-1) BEGIN EXEC('ALTER TABLE '+@REFERENCED+' NOCHECK CONSTRAINT '+@FK) -- EXEC('ALTER TABLE '+@REFERENCED+' DROP CONSTRAINT '+@FK) FETCH NEXT FROM cur1 INTO @FK, @REFERENCED END CLOSE cur1 DEALLOCATE cur1 8.5 Overview clustered AND nonclustered indexes. Query on sysindexes AND sysobjects. ------------------------------------------------------------------------------------ Every USEr database contains the system tables 'sysindexes', 'sysconstraints' AND 'sysobjects' Sysconstraints registers all defined constraints like PK, FK, CHECK constraints in your database, AND the internal id's of the tables involved. Sysobjects registers all objects like tables, views, procedures, with their names, internal id's, owner etc.. 
Sysindexes describes all indexes in the database, with their index-id, associated table id, type of index etc.. -Some of the most interesting columns of sysindexes: ---------------------------------------------------- Column name Data type Description id int ID of table to which the index belongs. indid smallint ID of index: 1 = Clustered index >1 = Nonclustered groupid smallint Filegroup ID on which the object was created. dpages int For indid = 0 or indid = 1, dpages is the count of data pages USEd. For indid > 1 it is the count of index pages USEd. rows int rowcount based on indid = 0 AND indid = 1, AND the value is repeated for indid >1. name -Some of the most interesting columns of sysobjects: ---------------------------------------------------- Column name Data type Description name sysname Object name. Id int Object identIFication number type char(2) Object type. Can be one of these object types: C = CHECK constraint D = Default or DEFAULT constraint F = FOREIGN KEY constraint L = Log FN = Scalar function IF = Inlined table-function P = Stored procedure PK = PRIMARY KEY constraint (type is K) RF = Replication filter stored procedure S = System table TF = Table function TR = Trigger U = USEr table UQ = UNIQUE constraint (type is K) V = View X = ExtENDed stored procedure uid smallint USEr ID of owner object. - Some of the most interesting columns of sysconstraints: --------------------------------------------------------- constid int Constraint number. id int ID of the table that owns the constraint. colid smallint ID of the column on which the constraint is defined, 0 IF a table constraint. spare1 tinyint Reserved. status int Bitmap indicating the status. Possible values include: 1 = PRIMARY KEY constraint. 2 = UNIQUE KEY constraint. 3 = FOREIGN KEY constraint. 4 = CHECK constraint. 5 = DEFAULT constraint. 16 = Column-level constraint. 32 = Table-level constraint. Now let's see IF we can build a query, that joins sysindexes AND sysobjects, in order to retreive some interesting information: This query shows all indexes AND tables in a database: -- indid 1 = clustered, indid>1 nonclustered -- indid 0 = table itself SELECT id, substring(object_name(id),1,30), rows, indid, substring(name,1,30) FROM sysindexes WHERE name not like '_WA%' SELECT substring(sysobjects.name,1,30) AS TABLENAME, substring(sysindexes.name,1,30) AS INDEXNAME, sysobjects.id, sysindexes.indid, sysindexes.groupid, sysindexes.rows FROM sysobjects, sysindexes WHERE sysobjects.id=sysindexes.id ORDER BY sysindexes.rows desc This query shows all indexes AND tables in a database WHERE sysobjects.type=U, meaning all normal USEr tables AND indexes: SELECT substring(sysobjects.name,1,30) AS TABLENAME, substring(sysindexes.name,1,30) AS INDEXNAME, sysobjects.id, sysindexes.indid, sysobjects.xtype, sysindexes.rows, (dpages*8192/1024/1024) AS SIZE FROM sysobjects, sysindexes WHERE sysobjects.id=sysindexes.id AND sysindexes.indid >= 1 AND sysobjects.type='U' ORDER BY sysindexes.rows desc 8.6 SCRIPT TO CREATE PK'S FROM UNIQUE INDEXES: ---------------------------------------------- -- SCRIPT PART 1. -- DYNAMICALLY CREATES 'ALTER TABLE .. ADD CONSTRAINT..' 
STATEMENTS SET NOCOUNT ON DECLARE @TABLENAME VARCHAR(64) DECLARE @INDEXNAME VARCHAR(64) DECLARE @KEYSET VARCHAR(64) DECLARE @I INT DECLARE @J INT create table ##ui_keys ( id int identity(1,1), table_name varchar(128), index_name varchar(128), index_description varchar(128), index_keys varchar(128) ) DECLARE c1 CURSOR FOR SELECT sysobjects.name AS TABLENAME, sysindexes.name AS INDEXNAME FROM sysobjects, sysindexes WHERE sysobjects.id=sysindexes.id AND sysindexes.indid=1 AND sysobjects.xtype='U' AND sysindexes.name NOT LIKE 'PK_%' ORDER BY sysobjects.name OPEN c1 FETCH NEXT FROM c1 INTO @TABLENAME,@INDEXNAME WHILE (@@fetch_status<>-1) BEGIN INSERT ##ui_keys (index_name, index_description, index_keys) exec ('sp_helpindex '+@TABLENAME) UPDATE ##ui_keys SET table_name=@TABLENAME FETCH NEXT FROM c1 INTO @TABLENAME,@INDEXNAME END CLOSE c1 DEALLOCATE c1 DELETE FROM ##ui_keys WHERE index_description LIKE 'nonclustered%' SELECT @J=(SELECT MAX(id) FROM ##ui_keys) SELECT @I=1 WHILE @I <= @J BEGIN SELECT @TABLENAME=(SELECT table_name FROM ##ui_keys WHERE id=@I) SELECT @INDEXNAME=(SELECT index_name FROM ##ui_keys WHERE id=@I) SELECT @KEYSET =(SELECT index_keys FROM ##ui_keys WHERE id=@I) PRINT 'ALTER TABLE '+@TABLENAME+' ADD CONSTRAINT '+@INDEXNAME+' PRIMARY KEY '+'('+@KEYSET+')' PRINT 'GO' SELECT @I=@I+1 END 8.7 Script to show dependent tables of a parent table: ------------------------------------------------------- create procedure show_dependents @tabname varchar(64) AS DECLARE @I INT DECLARE @J INT DECLARE @CHILD VARCHAR(64) BEGIN if exists (select * from dbo.sysobjects where id = object_id(N'[dbo].[CHILDS]') and OBJECTPROPERTY(id, N'IsUserTable') = 1) drop table [dbo].[CHILDS] CREATE TABLE CHILDS ( FK_NAME VARCHAR(64), TABPARENT VARCHAR(64), TABCHILD VARCHAR(64), L_DISTANCE CHAR(1) ) INSERT CHILDS (FK_NAME,TABPARENT,TABCHILD,L_DISTANCE) SELECT object_name(constid), object_name(rkeyid), object_name(fkeyid),'1' FROM sysreferences WHERE object_name(rkeyid)=@tabname SELECT @I=(SELECT COUNT(*) FROM CHILDS) DECLARE cur1 CURSOR STATIC FOR SELECT TABCHILD FROM CHILDS OPEN cur1 FETCH NEXT FROM cur1 INTO @CHILD WHILE (@@fetch_status<>-1) BEGIN IF @CHILD<>@tabname BEGIN INSERT CHILDS (FK_NAME,TABPARENT,TABCHILD,L_DISTANCE ) SELECT object_name(constid), object_name(rkeyid), object_name(fkeyid),'2' FROM sysreferences WHERE object_name(rkeyid)=@CHILD END FETCH NEXT FROM cur1 INTO @CHILD END CLOSE cur1 DEALLOCATE cur1 SELECT * FROM CHILDS END ---------------------------------------------------------------------- 9. Other USEfull dba queries: ---------------------------------------------------------------------- Example 9.1: ------------ -- The system table msdb.dbo.backupset, registers all backups of your databases. 
-- So, you can get a list of backups FROM a certain date as -- shown in the following example USE msdb SELECT backup_start_date, backup_finish_date, media_set_id, type, database_name FROM backupset WHERE backup_start_date>'2008-06-01' USE msdb SELECT backup_set_id, media_set_id, catalog_family_number, software_vENDor_id, USEr_name, backup_start_date, backup_finish_date, type, database_name FROM backupset /* WHERE backup_start_date > "your_date_of_choice" */ SELECT substring(s.database_name,1,20) as "database", (s.backup_size/1024/1024) as "Size_in_MB", s.type, s.backup_start_date, s.backup_finish_date, substring(f.physical_device_name,1,30) FROM backupset s, backupmediafamily f WHERE s.media_set_id=f.media_set_id AND s.backup_start_date > '2003-07-01' ORDER BY s.backup_start_date SELECT substring(s.database_name,1,20) as "database", (s.backup_size/1024/1024) as "Size_in_MB", s.type, s.backup_start_date, s.backup_finish_date, substring(f.physical_device_name,1,30) as "Device", s.first_lsn, s.last_lsn, database_backup_lsn FROM backupset s, backupmediafamily f WHERE s.media_set_id=f.media_set_id AND s.backup_start_date > '2003-07-15' ORDER BY s.backup_start_date -- Just get the latest backup date as recorded in the msdb database SELECT max(backup_finish_date) FROM backupset Example 9.2: ------------ -- IF you have created jobs, or have installed replication, -- the tables msdb.dbo.sysjobs AND msdb.dbo.sysjobsteps -- describes interesting properties about your jobs. USE msdb SELECT sysjobs.job_id, substring(sysjobs.name, 1, 30) AS JOBNAME, sysjobs.enabled, sysjobsteps.step_id, substring(sysjobsteps.step_name, 1, 20) AS STEPNAME, substring(sysjobsteps.commAND, 1, 20) AS COMMAND FROM sysjobs, sysjobsteps WHERE sysjobs.job_id=sysjobsteps.job_id SELECT i.job_id, substring(i.name,1,40) as jobname, step_id, substring(step_name, 1, 40) as stepname FROM sysjobs i, sysjobsteps j WHERE i.job_id=j.job_id Example 9.3: ------------ -- Automated Reindexing of all USEr tables in a database -- Here, DBCC DBREINDEX() is USEd for reindexing -- You should only using this method when your database -- is free FROM USEr activity, AND no batch processing should run -- Be carefull using the following script in a very large database -- BEGIN OF SCRIPT -- Go to the database of interest DECLARE @tabname VARCHAR(40) DECLARE cur1 CURSOR FOR SELECT sysobjects.name FROM sysobjects, sysindexes WHERE sysobjects.id=sysindexes.id AND sysobjects.type='U' OPEN cur1 FETCH NEXT FROM cur1 INTO @tabname WHILE (@@fetch_status<>-1) BEGIN EXEC ('DBCC DBREINDEX('+@tabname+')') PRINT 'REINDEXING '+@tabname FETCH NEXT FROM cur1 INTO @tabname END CLOSE cur1 DEALLOCATE cur1 -- END OF SCRIPT Example 9.4: ------------ -- Automated UPDATE STATISTICS commAND for all your USEr tables -- BEGIN OF SCRIPT DECLARE @tabname VARCHAR(40) DECLARE cur1 CURSOR FOR SELECT name FROM sysobjects WHERE type='U' OPEN cur1 FETCH NEXT FROM cur1 INTO @tabname WHILE (@@fetch_status<>-1) BEGIN EXEC ('UPDATE STATISTICS '+@tabname) FETCH NEXT FROM cur1 INTO @tabname END CLOSE cur1 DEALLOCATE cur1 -- END OF SCRIPT Example 9.5: ------------ -- Running automated scripts: You can run scripts FROM the commAND line as follows: E:\MSSQL7\BINN>osql –E –i show_dboptions.sql > dboptions.txt -E : use a trusted connection -Uusername –Ppassword : Or use an internal sqlserver account -i : switch to specIFy the inputscript Example 9.6: ------------- Generating backup commands: -- SCRIPT FOR GENERATING CORRECT BACKUP COMMANDS -- FOR DATABASES ON SGHDRC12 -- VERSION 2.0 -- DATE 
27-01-2004 SET NOCOUNT ON DECLARE @NAME VARCHAR(128) DECLARE @DATUM DATETIME DECLARE @BACKUP_DATUM VARCHAR(128) SELECT @DATUM=GETDATE() SELECT @BACKUP_DATUM=CONVERT(VARCHAR(10),@DATUM,20) -- NU DE DATABASENAMEN OPHALEN UIT DE DICTIONARY DECLARE c1 CURSOR FOR SELECT name FROM master.dbo.sysdatabases WHERE name not like '%AIDA%' OPEN c1 FETCH NEXT FROM c1 INTO @NAME WHILE (@@fetch_status<>-1) BEGIN PRINT 'BACKUP DATABASE '+@NAME+' TO DISK=''d:\backup\local_dbs\'+@NAME+'_'+@BACKUP_DATUM+'.dmp' PRINT 'GO' FETCH NEXT FROM c1 INTO @NAME END CLOSE c1 DEALLOCATE c1 -- END OF FILE ---------------------------------------------------------------------- 9. Full-Text Catalogs: ---------------------------------------------------------------------- 1. First, enable a database for Full-Text catalogs: --------------------------------------------------- Syntax: sp_fulltext_database [@action =] 'action' action=enable: enable Enables full-text indexing within the current database. Important Use carefully. If full-text catalogs already exist, this procedure drops all full-text catalogs, re-creates any full-text indexing indicated in the system tables, and marks the database as full-text enabled. This action does not cause index population to begin; an explicit start_full or start_incremental on each catalog must be issued using sp_fulltext_catalog to populate or repopulate the full-text index. action=disable: disable Removes all full-text catalogs in the file system for the current database and marks the database as being disabled for full-text indexing. This action does not change any full-text index metadata at the full-text catalog or table level. Example: This example enables full-text indexing for the Northwind database. USE Northwind EXEC sp_fulltext_database 'enable' 2. Enable one or more tables to be full-text indexed: ----------------------------------------------------- This means that for a table a PK or Unique key is needed. Secondly, you have a characterbased column on which you want the full-text search to apply on. The following procedure marks or unmarks a table for full-text indexing. Syntax: sp_fulltext_table [ @tabname = ] 'qualified_table_name' , [ @action = ] 'action' [ , [ @ftcat = ] 'fulltext_catalog_name' , [ @keyname = ] 'unique_index_name' ] action can be Create, Drop, Activate etc.. Example: USE Northwind EXEC sp_fulltext_table 'Categories', 'create', 'Cat_Desc', 'PK_Categories' .. Add some columns EXEC sp_fulltext_column 'Categories','Description','add' .. Activate the index EXEC sp_fulltext_table 'Categories','activate' 3. Enable a column to participate in full-text indexing: -------------------------------------------------------- The following procedure specifies whether or not a particular column of a table participates in full-text indexing. sp_fulltext_column [ @tabname = ] 'qualified_table_name' , [ @colname = ] 'column_name' , [ @action = ] 'action' [ , [ @language = ] 'language' ] [ , [ @type_colname = ] 'type_column_name' ] Example: This example adds the Description column from the Categories table to the table's full-text index. USE Northwind EXEC sp_fulltext_column Categories, Description, 'add' 4. Now start populating the catalog: ------------------------------------ sp_fulltext_catalog Creates and drops a full-text catalog, and starts and stops the indexing action for a catalog. Multiple full-text catalogs can be created for each database. 
Syntax sp_fulltext_catalog [ @ftcat = ] 'fulltext_catalog_name' , [ @action = ] 'action' [ , [ @path = ] 'root_directory' ] Examples: A. Create a full-text catalog This example creates an empty full-text catalog, Cat_Desc, in the Northwind database. USE Northwind EXEC sp_fulltext_catalog 'Cat_Desc', 'create' B. To rebuild a full-text catalog This example rebuilds an existing full-text catalog, Cat_Desc, in the Northwind database. USE Northwind EXEC sp_fulltext_catalog 'Cat_Desc', 'rebuild' C. Start the population of a full-text catalog This example begins a full population of the Cat_Desc catalog. USE Northwind EXEC sp_fulltext_catalog 'Cat_Desc', 'start_full' D. Stop the population of a full-text catalog This example stops the population of the Cat_Desc catalog. USE Northwind EXEC sp_fulltext_catalog 'Cat_Desc', 'stop' E. To remove a full-text catalog This example removes the Cat_Desc catalog. USE Northwind EXEC sp_fulltext_catalog 'Cat_Desc', 'drop' ---------------------------------------------------------------------- 11. Stored procedure examples, and code examples you can use in sp's: ---------------------------------------------------------------------- 11.1 Typical stored procedure with input parameters: ---------------------------------------------------- Typical sp, that might be used at some form of orderentry, here shown as a typical example: CREATE procedure orderentry @order_id int, @cust_name varchar(20), @product varchar(20), @quantity int AS BEGIN TRAN orders INSERT INTO orders (order_id, cust_name) values (@order_id, @cust_name) IF @@error=0 BEGIN INSERT INTO orderdetail (order_id, product, quantity) values (@order_id, @product, @quantity) COMMIT TRAN orders END ELSE BEGIN ROLLBACK TRAN orders RAISERROR ('Orderentry did not succeed.', 16, 1) WITH LOG PRINT 'Orderentry did not succeed due to errors.' 
END GO You can execute the sp in the following way: exec orderentry 5, 'Intel', 'chips', 10 11.2 Stored procedure with input AND output parameters: ------------------------------------------------------- CREATE PROCEDURE mathtutor @m1 int, @m2 int, @result int OUTPUT AS SET @result=@m1 * @m2 ---- Now execute the sp: You may not just do exec mathtutor 2, 5 The right way to use the sp is as follows: DECLARE @answer int EXECUTE mathtutor 2, 5, @answer OUTPUT SELECT 'the result is: ', @answer 11.3 Example of a system stored procedure: ------------------------------------------ You can use a sp to add a job to SQL Server: DECLARE @JobID BINARY(16) DECLARE @ReturnCode INT SELECT @ReturnCode = 0 EXECUTE @ReturnCode = msdb.dbo.sp_add_job @job_id = @JobID OUTPUT , @job_name = N'STEP1_input_data', @owner_login_name = N'W2KSQL\Administrator', @description = N'No description available.', @category_name = N'[Uncategorized (Local)]', @enabled = 0, @notIFy_level_email = 0, @notIFy_level_page = 0, @notIFy_level_netsEND = 0, @notIFy_level_eventlog = 0, @DELETE_level= 0 Add a jobstep to the job: EXECUTE @ReturnCode = msdb.dbo.sp_add_jobstep @job_id = @JobID, @step_id = 1, @step_name = N'Step1', @commAND = N'e:\synchro\bin\input.bat', @database_name = N'', @server = N'', @database_user_name = N'', @subsystem = N'CmdExec', @cmdexec_success_code = 0, @flags = 0, @retry_attempts = 0, @retry_interval = 1, @output_file_name = N'', @on_success_step_id = 0, @on_success_action = 1, @on_fail_step_id = 0, @on_fail_action = 2 11.5: get the username or applicationname ----------------------------------------- SELECT SUSER_NAME() -- is sql 7 syntax, in sql2k use SUSER_SNAME SELECT SUSER_SNAME() -- returns nt login IF nt, sql login IF sql SELECT USER_NAME() -- returns sql login / database user SELECT SESSION_USER -- returns sql login SELECT HOST_ID() SELECT HOST_NAME() -- return machinename SELECT CURRENT_USER -- returns sql login / database user SELECT APP_NAME() -- returns application SELECT SUSER_SID() -- returns SID SELECT SUSER_SNAME() -- returns nt IF nt, return sql IF sql SELECT USER_ID() -- Returns a user's database identIFication number. SELECT @@spid -- returns spid SELECT @@servername SELECT @@servicename -- sql login / database user DECLARE @session_usr char(30) SET @session_usr = SESSION_USER SELECT 'This session''s current user is: '+ @session_usr GO -- returns nt login IF nt, or sql login IF sql DECLARE @sys_usr char(30) SET @sys_usr = SYSTEM_USER SELECT 'The current system user is: '+ @sys_usr GO -- database user DECLARE @usr char(30) SET @usr = user SELECT 'The current user''s database username is: '+ @usr GO SELECT substring(nt_username, 1, 15) AS "USERNAME (NT or NULL)", substring(loginame, 1, 20) AS "LOGINNAME (NT or SQL)", substring(hostname, 1, 15) AS "HOSTNAME", substring(program_name, 1, 40) AS "PROGRAM" FROM master.dbo.sysprocesses SELECT spid AS "SQL process ID", kpid AS "NT thread ID", ecid AS "Execution context ID", nt_username, loginame FROM master.dbo.sysprocesses 11.6: can be used at errorhANDling ---------------------------------- IF (@@TRANCOUNT > 0) BEGIN RAISERROR() END IF (@@ROWCOUNT = 0) BEGIN RAISERROR() END IF @@error > 0 BEGIN GOTO error_section END . . 
error_section: PRINT @MESSAGE RETURN GO 11.7: set a value IF it's null ------------------------------ IF (@Volgnr IS NULL ) BEGIN SET @Volgnr = 1 END ELSE BEGIN SET @Volgnr = @Volgnr + 1 END 11.8: INSERT variable values into a table ----------------------------------------- INSERT AfschrIFt (AfschrIFtId, RekeningId, VolgNr, Type, EindDatum) VALUES (@R, @RekeningId, @VolgNr, 'TR', CONVERT(smalldatetime, CONVERT(varchar(10), GetDate(), 112), 112)) 11.9: IF ON, "x" IF x is an object, it cannot be used in DDL ------------------------------------------------------------- SET QUOTED_IDENTIFIER OFF SET ANSI_NULLS ON GO 11.10: Using xp_cmdshell AND bcp or other external cmd: ------------------------------------------------------- Example 1: ---------- SELECT @LOGSTRING=@ART_NR+' :'+@ACTION+' '+CONVERT(VARCHAR(64),@ACTION_DATE) SELECT @log_cmd='echo'+' '+@LOGSTRING+' >> C:\TEMP\LOAD_FILE.LOG' EXEC master.dbo.xp_cmdshell @log_cmd Example 2: ---------- SELECT @totalcommAND='bcp ##BCP_LOAD in '+@importpath+' -c -F2 -T' EXEC @RESULT = master.dbo.xp_cmdshell @totalcommAND IF (@RESULT <> 0) BEGIN SET @MESSAGE='Error loading data in temporary table. Possibly wrong path or file not found.' GOTO error_section END Example 3: ---------- DECLARE @cmd sysname, @var sysname SET @var = 'Hello world' SET @cmd = 'echo ' + @var + ' > var_out.txt' EXEC master..xp_cmdshell @cmd DECLARE @cmd sysname, @var sysname SET @var = 'dir/p' SET @cmd = @var + ' > dir_out.txt' EXEC master..xp_cmdshell @cmd Example 4: ---------- WHILE (@@FETCH_STATUS<>-1) BEGIN SELECT @length_table =LEN(@TEMPTAB) SELECT @TEMPTAB2=substring(@TEMPTAB,3,@length_table-2) SELECT @TEMPTAB2 SELECT @totalcmd='c:\exp_solid\solexp -o c:\exp_solid\'+@TEMPTAB2+'.txt '+' "shm zg39bbv"'+' zg39bbv zg39bbv '+@TEMPTAB2 EXEC master..xp_cmdshell @totalcmd FETCH NEXT FROM cur1 INTO @TEMPTAB END Example 5: ---------- DECLARE @FileName varchar(50), @bcpCommand varchar(2000) SET @FileName = REPLACE('c:\authors_'+CONVERT(char(8),GETDATE(),1)+'.txt','/','-') SET @bcpCommand = 'bcp "SELECT * FROM pubs..authors ORDER BY au_lname" queryout "' SET @bcpCommand = @bcpCommand + @FileName + '" -U garth -P pw -c' EXEC master..xp_cmdshell @bcpCommand Other notes: ------------ By default, xp_cmdshell can only be used by logins in the sysadmin server role. When a login that's in the sysadmin role executes xp_cmdshell, it runs under the windows account that SQL Server is running under. When you grant the right to run xp_cmdshell to a login that is not in the sysadmin role, you must set the account that is used to run xp_cmdshell and any programs that it invokes. This is done with the extended stored procedure xp_sqlagent_proxy_account. Suppose that you want to grant the right to execute xp_cmdshell to the SQL login LimitedUser. You'll need an NT account to execute the program. Here's the script: use master go xp_sqlagent_proxy_account N'SET' , N'' , N'' , N'' go -- retrieve the proxy account to check that it's correct. xp_sqlagent_proxy_account N'GET' go -- grant database access in master sp_grantdbaccess 'LimitedUser' go grant exec on xp_cmdshell to LimitedUser go Also, you may go into Enterprise Manager and bring up the property page for SQL Agent. Click on the "Job System" tab. In the Non-SysAdmin job step proxy account area of the page, clear the check box Only users with SysAdmin privileges can execute CmdExec and ActiveScripting job steps. You can fill in an account here. --Corresponds to the Enterprise Manager SQL Agent property page -- Job System tab. 
Sets the value of "Only users with SysAdmin -- privileges can execute CmdExec and ActiveScripting job steps" -- 1 Turns on the restriction -- 0 Turns off the restriction and allows non sysadmin logins -- to do this and to run xp_cmdshell EXECUTE msdb..sp_set_sqlagent_properties @sysadmin_only = 0 go Be aware that Service Pack 3 changes the behavior of SQL Server by making the permission for non-sysadmin accounts to use xp_cmdshell conditional on the value of the Only users with SysAdmin privileges can execute CmdExec and ActiveScripting job steps flag. Custom xp_cmdshell: ------------------- CREATE PROCEDURE xp_cmdshell(@cmd varchar(255), @Wait int = 0) AS --Create WScript.Shell object DECLARE @result int, @OLEResult int, @RunResult int DECLARE @ShellID int EXECUTE @OLEResult = sp_OACreate 'WScript.Shell', @ShellID OUT IF @OLEResult <> 0 SELECT @result = @OLEResult IF @OLEResult <> 0 RAISERROR ('CreateObject %0X', 14, 1, @OLEResult) EXECUTE @OLEResult = sp_OAMethod @ShellID, 'Run', Null, @cmd, 0, @Wait IF @OLEResult <> 0 SELECT @result = @OLEResult IF @OLEResult <> 0 RAISERROR ('Run %0X', 14, 1, @OLEResult) --If @OLEResult <> 0 EXEC sp_displayoaerrorinfo @ShellID, @OLEResult EXECUTE @OLEResult = sp_OADestroy @ShellID return @result 11.11: check for a precondition value before some action takes place -------------------------------------------------------------------- DECLARE @message VARCHAR(100) IF (SELECT BLOK_CODE FROM DSA_IMPORT..BLOKKADE) = 'Y' BEGIN SELECT @message = 'error: blocking flag is Y' goto error_section END 11.12: days of week ------------------- SET DATEFIRST 5 SELECT @@DATEFIRST AS '1st Day', DATEPART(dw, GETDATE()) AS 'Today' Here is the result set. Counting FROM Friday, today (Saturday) is day 2. 1st Day Today ---------------- -------------- 5 2 Or see this sample: SELECT @datumset = @@DATEFIRST set DATEFIRST 1 SELECT @startdatum = @nextdat - (DATEPART(dw, @nextdat)-1) SELECT @einddatum = @nextdat + (7 - DATEPART(dw,@nextdat)) SET DATEFIRST @datumset INSERT INTO DESTINATION_TABLE SELECT * FROM SOURCE_TABLE WHERE DATE_CRITERIUM between @nextdat AND @einddatum Or see this sample: set datefirst 1 declare @dag varchar(10) set @dag = (SELECT dag = case when datepart(dw,getdate()) = 1 then 'MA' when datepart(dw,getdate()) = 2 then 'DI' when datepart(dw,getdate()) = 3 then 'WOE' when datepart(dw,getdate()) = 4 then 'DO' when datepart(dw,getdate()) = 5 then 'VR' else 'onbekEND' END) SELECT @dag 11.13: keeping the no of transactions manageble ----------------------------------------------- DECLARE @i INT DECLARE @j INT DECLARE @k INT SET @i=0 SET @j=0 WHILE (@i<10000) BEGIN SET @J=0 BEGIN TRAN WHILE (@j<100) BEGIN INSERT INTO y values (@i,'joop') SELECT @i=@i+1 SELECT @j=@j+1 END COMMIT -- just inspect those integers SELECT @k=(SELECT COUNT(*) FROM y) SELECT @k SELECT @i -- END inspection END 11.14: Use of a user defined function ------------------------------------- User defined functions can accept parameters AND return either a scalar value or a table. 
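For example, a scalar function looks like this (a minimal sketch; the function name, parameters AND values are made up for illustration):

CREATE FUNCTION dbo.udfLineTotal (@price money, @qty int)
RETURNS money
AS
BEGIN
   RETURN (@price * @qty)
END
GO

-- A scalar function must be called with its two-part name:
SELECT dbo.udfLineTotal(10.50, 3)   -- returns 31.50

The next example is an inline table-valued function: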
CREATE FUNCTION udfEmpByCity(@city varchar(20)) RETURNS TABLE AS RETURN ( SELECT EmployeeID, LastName, FirstName FROM Employees WHERE (City=@City) ) To access the resultset of this function you can use the next example: SELECT * FROM udfEmpByCity('Seattle') 11.15: a compare option ----------------------- DECLARE @I INT DECLARE @J INT DECLARE @NEXTID VARCHAR(10) DECLARE @ID VARCHAR(20) DECLARE @SMS VARCHAR(64) SELECT @J = (SELECT COUNT(*) FROM SOFTCODE) SELECT @I = 0 WHILE @I < @J BEGIN SELECT @I=@I+1 SELECT @ID =(SELECT ID FROM SOFTCODE WHERE IDENT=@I) SELECT @SMS =(SELECT DISTINCT packagename FROM packages WHERE packagename like '%'+'ID='+'%'+@ID+'%') UPDATE SOFTCODE SET SOFTCODE.SMSSOFT=@SMS FROM SOFTCODE WHERE SOFTCODE.ID=@ID END 11.17: remove AND add primary key --------------------------------- ALTER TABLE ASSET_SMS_EXT DROP CONSTRAINT ASSET_PK ALTER TABLE ASSET_SMS_EXT ADD CONSTRAINT ASSET_PK PRIMARY KEY (DWMACHINEID) ALTER TABLE SMSMACHINES ADD CONSTRAINT SMSMACHINES_PK PRIMARY KEY (DWMACHINEID) ALTER TABLE SMSSW ADD CONSTRAINT SMSSW_FK FOREIGN KEY (DWMACHINEID) REFERENCES SMSMACHINES(DWMACHINEID BEGIN EXEC('ALTER TABLE '+@tab_name+' NOCHECK CONSTRAINT '+@fk_name) EXEC('ALTER TABLE '+@tab_name+' DROP CONSTRAINT '+@fk_name) FETCH NEXT FROM cur1 INTO @fk_name, @tab_name END 11.18: tricky procedure to create a dIFference table between 2 tables --------------------------------------------------------------------- CREATE PROCEDURE DELTA_SMS_CMDB @MYVAR VARCHAR(20) AS /* CLEANUP TEMPORY TABLES */ TRUNCATE TABLE SMSSOFT TRUNCATE TABLE CMDBSOFT TRUNCATE TABLE DELTA_SMS /* PUT DATA INTO TABLES */ INSERT INTO SMSSOFT (NAME, SERIALNUMBER, SOFTWARE) SELECT DISTINCT SHORTNAME, SERIALNUMBER, SOFTWARE FROM SMSSOFTWARE WHERE SOFTWARE LIKE '%'+@MYVAR+'%' AND SERIALNUMBER IS NOT NULL ORDER BY SHORTNAME INSERT INTO CMDBSOFT (NAME, SERIALNUMBER, SOFTWARE) SELECT DISTINCT NAME, SERIALNUMBER, SOFTWARE FROM CMDBSOFTWARE WHERE SOFTWARE LIKE '%'+@MYVAR+'%' AND SERIALNUMBER IS NOT NULL ORDER BY NAME /* DECLARE NEEDED VARIABLES IN OUR CODE */ DECLARE @i INT DECLARE @j INT DECLARE @p INT DECLARE @t INT DECLARE @nameA VARCHAR(20) DECLARE @nameB VARCHAR(20) SELECT @p=(SELECT COUNT(*) FROM CMDBSOFT) IF @p=0 GOTO EINDE DECLARE CUR1 CURSOR FOR SELECT SMSSOFT.SERIALNUMBER FROM SMSSOFT OPEN CUR1 FETCH NEXT FROM CUR1 INTO @nameA WHILE (@@FETCH_STATUS <> -1) BEGIN INSERT INTO DELTA_SMS (NAME, SERIALNUMBER, SOFTWARE) SELECT DISTINCT SMSSOFT.NAME, SMSSOFT.SERIALNUMBER, SMSSOFT.SOFTWARE FROM SMSSOFT, CMDBSOFT WHERE (SMSSOFT.SERIALNUMBER=@nameA) AND (CMDBSOFT.SERIALNUMBER<>@nameA) FETCH NEXT FROM CUR1 INTO @NAMEA END CLOSE CUR1 DEALLOCATE CUR1 /* NOW MAKE THE DIFFERENCE WITH CMDBSOFT, THAT IS CMDBSOFT WITH ONLY @MYVAR */ DECLARE CUR2 CURSOR FOR SELECT CMDBSOFT.SERIALNUMBER FROM CMDBSOFT OPEN CUR2 FETCH NEXT FROM CUR2 INTO @nameB WHILE (@@FETCH_STATUS <> -1) BEGIN DELETE FROM DELTA_SMS WHERE SERIALNUMBER=@nameB FETCH NEXT FROM CUR2 INTO @NAMEB END CLOSE CUR2 DEALLOCATE CUR2 /* NOW CREATE NEW TABLE WITH @MYVAR DATA */ SELECT @t=(SELECT ID FROM SYSOBJECTS WHERE NAME="DELTA_SMS_'+@MYVAR+'") IF @t=0 GOTO PROC1 ELSE GOTO PROC2 PROC1: EXECUTE ('CREATE TABLE DELTA_SMS_'+@MYVAR+ '( NAME VARCHAR(20) NOT NULL, SERIALNUMBER VARCHAR(30) NULL, SOFTWARE VARCHAR(64) NULL, DEEP INT IDENTITY(1,1) NOT NULL)') EXECUTE ('INSERT INTO DELTA_SMS_'+@MYVAR+ ' (NAME, SERIALNUMBER, SOFTWARE) SELECT DISTINCT NAME, SERIALNUMBER, SOFTWARE FROM DELTA_SMS') GOTO EINDE2 PROC2: EXECUTE ('DROP TABLE DELTA_SMS_'+@MYVAR+'') EXECUTE ('CREATE TABLE 
DELTA_SMS_'+@MYVAR+ '( NAME VARCHAR(20) NOT NULL, SERIALNUMBER VARCHAR(30) NULL, SOFTWARE VARCHAR(64) NULL, DEEP INT IDENTITY(1,1) NOT NULL)') EXECUTE ('INSERT INTO DELTA_SMS_'+@MYVAR+ ' (NAME, SERIALNUMBER, SOFTWARE) SELECT DISTINCT NAME, SERIALNUMBER, SOFTWARE FROM DELTA_SMS') GOTO EINDE2 EINDE: SELECT "TABLE CMDBSOFTWARE DOES NOT CONTAIN THIS PARTICULAR SOFTWARE: " SELECT @myvar SELECT "THIS MEANS THAT TABLE SMSSOFTWARE QUERIED ON THIS PARTICULAR SOFTWARE " SELECT "IS THE DIFFERENCE LIST." RETURN EINDE2: RETURN 11.19: Some CASE WHEN examples: ------------------------------- WHILE (SELECT STA_VERWERKING FROM STATUS_CSO) in ('Y','J') BEGIN END ----- declare @v1 varchar(10) SELECT @v1=(SELECT name1 FROM t1 WHERE id=1) SELECT CASE @v1 WHEN 'appie1' THEN 'dit is appie1' WHEN 'appie2' THEN 'dit is appie2' ELSE 'Gerrit' END ----- CREATE procedure SessionConfig1 AS SELECT tbl_NetworkSessionTable.[HSS Ident], tbl_NetworkSessionTable.Identnr, tbl_NetworkSessionTable.Value, tbl_NetworkSessionTable.SessionId, tbl_SerialnumberReference.Serialnumber, tbl_SerialnumberReference.SerialnumbersettingID, tbl_SerialnumberReference.type, tbl_SerialnumberReference.[Offset value1], tbl_SerialnumberReference.[Offset value2], (SELECT CASE substring([HSS ident],1,3) WHEN 'ECU' THEN substring([HSS ident],4,1) ELSE '' END) AS Position FROM tbl_NetworkSessionTable LEFT JOIN tbl_SerialnumberReference ON tbl_NetworkSessionTable.Value=tbl_SerialnumberReference.Serialnumber ----- UPDATE KP_VPT_RELATIES_SBL SET VRSB_SOORT_MUTATIE = CASE WHEN RSBL_BRONSYSTEEM IS NULL THEN 'I' WHEN RSBL_BRONSYSTEEM IS NOT NULL THEN 'U' END FROM KP_VPT_RELATIES_SBL LEFT JOIN DW_VPT_RELATIES_SBL ON RSBL_BRONSYSTEEM = VRSB_BRONSYSTEEM AND RSBL_CLIENTNUMMER = VRSB_CLIENTNUMMER AND RSBL_RELATIEROL_CODE = VRSB_RELATIEROL_CODE WHERE UPPER(VRSB_STATUS_CODE) = 'N' AND UPPER(VRSB_SOORT_MUTATIE) in ('I','U') ----- 11.20: Wait in procedure: ------------------------- SELECT @l_tijdlus = '000:00:05' WAITFOR DELAY @l_tijdlus 11.21: Use of sENDmail: ----------------------- Example 1 --------- declare @rc int exec xp_cmdshell 'osql -SServername -E -q pkinfo.dbo.csp_pktrap_s -o f:\Reports\report.txt' exec @rc = master.dbo.xp_smtp_sENDmail @FROM = N'ServerX', @FROM_NAME = N'DBA OPERATIONS', @TO = N'A.Brown@xyzcompany.com', @CC = N'B.Blackj@xyzcompany.com', @BCC = N'', @priority = N'NORMAL', @subject = N'Weekly report about...', @message = N'The content of: csp_pktrap_s', @messagefile = N'', @type = N'text/plain', @attachment = N'f:\Reports\report.txt', @attachments = N'', @codepage = 0, @server = N'SVR' SELECT RC = @rc go Example 2 --------- Use xp_sENDmail with no variables This example sENDs a message to user Robert King (e-mail is robertk) that the master database is full. EXEC xp_sENDmail 'robertk', 'The master database is full.' Example 3 --------- Use xp_sENDmail with variables This example sENDs the message to users Robert King AND Laura Callahan (e-mail is laurac), with copies sent to Anne Dodsworth (e-mail is anned) AND Michael Suyama (e-mail is michaels). It also specIFies a subject line for the message. 
EXEC xp_sendmail @recipients = 'robertk;laurac',
   @message = 'The master database is full.',
   @copy_recipients = 'anned;michaels',
   @subject = 'Master Database Status'

11.22 Transactions:
-------------------
BEGIN TRAN xyz

DELETE oas_grplist WHERE grpcode = '1RFC22642'

SELECT @l_rowcount = @@rowcount, @l_error = @@error

IF @l_error <> 0
BEGIN
  PRINT 'ERROR DURING EXECUTION - NO DELETE CARRIED OUT'
  ROLLBACK TRAN xyz
  GOTO END_section
END

IF @l_rowcount <> @p_maxaantal
BEGIN
  PRINT 'WRONG NUMBER OF ROWS SELECTED - ' + CAST(@l_rowcount AS varchar) + ' - NO DELETE CARRIED OUT'
  ROLLBACK TRAN xyz
  GOTO END_section
END

COMMIT TRAN xyz
PRINT 'DELETE FOR GROOTBOEKSYSTEEM CARRIED OUT FOR ' + CAST(@l_rowcount AS varchar) + ' ROWS.'

END_section:
RETURN
END
GO

Note that the ROLLBACK TRAN and COMMIT TRAN must use the same transaction name as the BEGIN TRAN (here 'xyz').

11.23 Assign permission to an object:
-------------------------------------
example:
grant SELECT,INSERT,UPDATE on table1 to user_test

11.24 Linked Server:
--------------------
sp_addlinkedserver [ @server = ] 'server' -- local name
   [ , [ @srvproduct = ] 'product_name' ]
   [ , [ @provider = ] 'provider_name' ]
   [ , [ @datasrc = ] 'data_source' ]
   [ , [ @location = ] 'location' ]
   [ , [ @provstr = ] 'provider_string' ]
   [ , [ @catalog = ] 'catalog' ]

Example 1:
----------
This example creates a linked server named SEATTLESales that uses the Microsoft OLE DB Provider for SQL Server.

USE master
GO
EXEC sp_addlinkedserver 'SEATTLESales', N'SQL Server'
GO

Example 2:
----------
This example creates a linked server named 'TESTsrv' that uses the Microsoft OLE DB Provider for Oracle AND assumes that the SQL*Net alias for the Oracle database is 'airm'.

USE master
GO
EXEC sp_addlinkedserver
   @server = 'TESTsrv',
   @srvproduct = 'Oracle',
   @provider = 'MSDAORA',
   @datasrc = 'airm'    -- sql*net alias
GO
EXEC sp_addlinkedsrvlogin 'TESTsrv', false, 'w2ksql\Administrator', 'piet', 'piet'
GO

Example 3:
----------
This example creates a linked server named 'TestAccess' to MS Access.

EXEC sp_addlinkedserver
   @server = 'TestAccess',
   @provider = 'Microsoft.Jet.OLEDB.4.0',
   @srvproduct = 'OLE DB Provider for Jet',
   @datasrc = 'C:\MSOffice\Access\Samples\Northwind.mdb'
GO

To query tables of piet:
------------------------
SELECT * FROM TESTsrv..PIET.ABC
INSERT INTO TESTsrv..PIET.ABC values (10,'lukt het')

Drop the linked server:
-----------------------
sp_droplinkedsrvlogin 'TESTsrv', 'w2ksql\Administrator'
sp_dropserver 'TESTsrv'

Example 4:
----------
Execute sp_addlinkedserver to create the linked server, specifying MSDAORA as provider_name, AND the SQL*Net alias name for the Oracle database instance as data_source. This example assumes that an SQL*Net alias name has been defined as OracleDB.

sp_addlinkedserver 'OrclDB', 'Oracle', 'MSDAORA', 'OracleDB'

Use sp_addlinkedsrvlogin to create login mappings FROM SQL Server logins to Oracle logins.
This example maps the SQL Server login Joe to the linked server defined in the former step, using the Oracle login AND password OrclUsr AND OrclPwd:

sp_addlinkedsrvlogin 'OrclDB', false, 'Joe', 'OrclUsr', 'OrclPwd'

Example 5:
----------
CREATE PROCEDURE set_link @alias nvarchar(50)
AS
/* Creates a linked server TO ORACLE database */
SET NOCOUNT ON

-- IF exists must drop
IF EXISTS (SELECT * FROM master.dbo.sysservers WHERE srvname = 'OPC_TEMP_LINK_SVR')
BEGIN
  EXEC sp_droplinkedsrvlogin 'OPC_TEMP_LINK_SVR', NULL
  EXEC sp_dropserver 'OPC_TEMP_LINK_SVR'
END

-- create
EXEC sp_addlinkedserver
  @server = 'OPC_TEMP_LINK_SVR',   -- used by all dm stored procs
  @srvproduct = 'Oracle',
  @provider = 'MSDAORA',
  @datasrc = @alias                -- SQL*Net alias for the Oracle database

EXEC sp_addlinkedsrvlogin 'OPC_TEMP_LINK_SVR', 'false', NULL, 'Oracle_User', 'Oracle_Password'
-- done
GO

11.25 DB options:
-----------------
Example:
EXEC sp_dboption 'sales', 'offline', 'TRUE'

11.26 fill a table FROM other table:
------------------------------------
Example 1:
----------
Two options to realize this:

SELECT * INTO table_b FROM table_a

INSERT INTO table_b SELECT * FROM table_a

Example 2:
----------
INSERT INTO FILELIST (SERIALNUMBER, FILENAME, FILESIZE, FILEDATE)
SELECT DISTINCT SERIALNUMBER, FILENAME, FILESIZE, FILEDATE FROM VPFILELIST
WHERE SERIALNUMBER=@SERIALNR

DROP INDEX FILELIST.indxfilelist1
CREATE INDEX indxfilelist1 on FILELIST(SERIALNUMBER) with fillfactor=70

Example 3:
----------
INSERT INTO ##STAGE_LOAD
SELECT ART_NR, LTRIM(ART_OMS), LTRIM(ART_VRP_EENH), convert(money,ART_PR_EX), convert(money, ART_PR_IN)
FROM ##BCP_LOAD

Example 4:
----------
INSERT INTO NG39AFGP
SELECT CONVERT(INT,AFG_PNT_NR), U_VERSION, AFG_PNT_OMS, CONVERT(bit,AFG_PNT_VAST)
FROM ##NG39AFGP

11.27 Use of triggers:
----------------------
A trigger is like a stored procedure, but it is an object that's bound to a table. It will only run (fire) when an INSERT, UPDATE or DELETE statement is issued against this table. When an INSERT and/or UPDATE and/or DELETE is done on a table, the trigger will fire and the statements in the trigger will execute. A common type of application is that another table will be updated when the trigger table (the table WHERE the trigger is defined on) is modified, like for example a STOCK table that automatically gets updated when an ORDER is placed in the ORDER table.

Special temporary "tables" DELETED and INSERTED can be used with triggers. They have the same datamodel as the table the trigger is defined on.

DELETED : stores a copy of the row(s) that are affected by the DELETE or UPDATE statement.
INSERTED: stores a copy of the row(s) that are affected by the INSERT or UPDATE statement.

Example 1:
----------
CREATE TRIGGER orders_INSERT ON orders
FOR INSERT, UPDATE
AS
UPDATE stock SET in_stock=in_stock-INSERTed.amount
FROM stock, INSERTed
WHERE stock.item_id=INSERTed.item_id

Example 2:
----------
CREATE TRIGGER employee_insupd ON employee
FOR INSERT, UPDATE
AS
--Get the range of level for this job type FROM the jobs table.
declare @min_lvl tinyint, @max_lvl tinyint, @emp_lvl tinyint, @job_id smallint SELECT @min_lvl = min_lvl, @max_lvl = max_lvl, @emp_lvl = i.job_lvl, @job_id = i.job_id FROM employee e, jobs j, INSERTed i WHERE e.emp_id = i.emp_id AND i.job_id = j.job_id IF (@job_id = 1) AND (@emp_lvl <> 10) BEGIN raiserror ('Job id 1 expects the default level of 10.',16,1) ROLLBACK TRANSACTION END ELSE IF NOT (@emp_lvl BETWEEN @min_lvl AND @max_lvl) BEGIN raiserror ('The level for job_id:%d should be between %d AND %d.', 16, 1, @job_id, @min_lvl, @max_lvl) ROLLBACK TRANSACTION END Example 3: ---------- CREATE TRIGGER TR_NG10QGRP_INS on RES_NG10QGRP FOR INSERT AS DECLARE @ERR_MESSAGE VARCHAR(128) DECLARE @FIE_FUNKKODE VARCHAR(10) DECLARE @GRP_GRPKODE VARCHAR(6) SELECT @GRP_GRPKODE = i.GRP_GRPKODE FROM inserted i, RES_NG10QGRP p WHERE i.GRP_GRPKODE=p.GRP_GRPKODE IF EXISTS (SELECT FUG_GRPKODE FROM RES_NG10QFUG WHERE FUG_GRPKODE=@GRP_GRPKODE) BEGIN PRINT 'FUG_GRPKODE ALREADY EXIST IN NG10QFUG' RAISERROR ('FUG_GRPKODE ALREADY EXIST IN NG10QFUG', 16, 1) WITH LOG RETURN END ELSE -- vul blok in QFUG voor deze nieuwe GRP, even dynamisch ophalen uit QFIE BEGIN DECLARE C_1 CURSOR FOR SELECT DISTINCT FIE_FUNKKODE FROM RES_NG10QFIE OPEN C_1 FETCH NEXT FROM C_1 INTO @FIE_FUNKKODE WHILE (@@FETCH_STATUS <> -1) BEGIN INSERT INTO RES_NG10QFUG (FUG_FUNKKODE, FUG_GRPKODE) VALUES (@FIE_FUNKKODE, @GRP_GRPKODE) FETCH NEXT FROM C_1 INTO @FIE_FUNKKODE END CLOSE C_1 DEALLOCATE C_1 END Example 4: ---------- CREATE TRIGGER tr_ng10mod_ins on NG10MOD FOR INSERT as DECLARE @UserName VARCHAR(128) DECLARE @cinfo VARBINARY(128) DECLARE @MODNR NVARCHAR(16) SELECT @cinfo=(SELECT context_info FROM master.dbo.sysprocesses WHERE spid=@@spid) if @cinfo=0x0 -- meaning context_info could not be set begin SET @username=SUSER_SNAME() -- So, take NT/2000 name as alternative end else begin SET @username=convert(VARCHAR(128),@cinfo) end SELECT @MODNR=i.MODNR FROM inserted i, NG10MOD p WHERE i.MODNR=p.MODNR INSERT INTO NG10AUDIT (TABEL,MODNR,MUTATIE,GEBRUIKER) VALUES (NG10MOD,@MODNR,'T',@username) GO Example 5: ---------- View enabled and disabled triggers: SELECT LEFT(sop.name,36) AS 'Table', LEFT(so.name,36) AS 'Trigger', CASE WHEN OBJECTPROPERTY(so.id, 'ExecIsTriggerDisabled') = 1 THEN 'Disabled' ELSE 'Enabled' END AS 'Trigger Status' FROM sysobjects so INNER JOIN sysobjects sop ON so.parent_obj = sop.id WHERE so.xtype = 'TR' ORDER BY 1, 2 Example 6: Some mixed examples --------- CREATE TRIGGER updEmployeeData ON employeeData FOR update AS /*Check whether columns 2, 3 or 4 has been updated. If any or all of columns 2, 3 or 4 have been changed, create an audit record. The bitmask is: power(2,(2-1))+power(2,(3-1))+power(2,(4-1)) = 14. To check if all columns 2, 3, and 4 are updated, use = 14 in place of >0 (below).*/ IF (COLUMNS_UPDATED() & 14) > 0 /*Use IF (COLUMNS_UPDATED() & 14) = 14 to see if all of columns 2, 3, and 4 are updated.*/ BEGIN /*DO SOMETHING HERE */ END GO There also is an example of how to detect a change in all columns is there are more than 8. That example is below. CREATE TRIGGER tr1 ON Customers FOR UPDATE AS IF ( (SUBSTRING(COLUMNS_UPDATED(),1,1)=power(2,(3-1)) + power(2,(5-1))) AND (SUBSTRING(COLUMNS_UPDATED(),2,1)=power(2,(1-1))) ) PRINT 'Columns 3, 5 and 9 updated' GO CREATE TRIGGER trg_tableName_upd ON tableName AFTER UPDATE AS IF UPDATE(columnToCheckValueOn) BEGIN INSERT INTO otherTable SELECT i.* --[or] column1, column2, ... 
FROM inserted i INNER JOIN deleted d ON i.keyCol = d.keyCol --AND i.keyCol2 = d.keyCol2 WHERE i.columnToCheckValueOn = 'valueToCheckFor' AND i.columnToCheckValueOn <> d.columnToCheckValueOn END --IF CREATE TRIGGER trg_functies ON functies AFTER insert, update AS DECLARE @x INT SELECT @x=(SELECT functie_id from INSERTED) BEGIN INSERT INTO funct_soft (functie_id) values (@x) END 11.28 Ways to run DTS: ---------------------- 1. call a DTS FROM a stored procedure: - via sp_Oacreate - via job cREATE PROCEDURE dbo.mydtsjob AS exec msdb.dbo.sp_start_job @job_name = 'MyDTSJob' - via xp_cmdshell @Result int SET @Result=EXEC xp_cmdshell 'dtsrun 'Mypackage' ' IF @result=0 print 'OK' else print 'Failure' use master exec xp_cmdshell "DTSRun /S servername /U username /P password /N packagename" create proc newprocedurename as exec xp_cmdshell "DTSRun /S servername /U username /P password /N packagename" 11.29 Check on Server AND database: ----------------------------------- DECLARE @db VARCHAR(128) DECLARE @srv VARCHAR(128) SELECT @db=DB_NAME() IF @db in ('master', 'model', 'msdb') BEGIN PRINT 'PROCEDURE MAY NOT RUN IN THE MASTER, MODEL OR MSDB DATABASE.' PRINT 'PROCEDURE TERMINATED.' RETURN END SELECT @srv=@@SERVERNAME IF @srv='CODWDB035P' BEGIN PRINT 'PROCEDURE MAY NOT RUN ON CODA PRODUCTION SERVER.' PRINT 'PROCEDURE TERMINATED.' RETURN END 11.30 Use of sp_executesql: --------------------------- Example 1: ---------- exec sp_executesql N'CREATE TABLE X (ID INT NULL)' Example 2: ---------- declare @Statement NVARCHAR(1024) SET @Statement = N'CREATE TABLE Y (ID INT NULL)' exec sp_executesql @statement 11.32 Use of sp_OaCreate: ------------------------- Example 1: ---------- CREATE PROCEDURE sp_AppendToFile(@FileName varchar(255), @Text1 varchar(255)) AS DECLARE @FS int, @OLEResult int, @FileID int EXECUTE @OLEResult = sp_OACreate 'Scripting.FileSystemObject', @FS OUT IF @OLEResult <> 0 PRINT 'Scripting.FileSystemObject' --Open a file execute @OLEResult = sp_OAMethod @FS, 'OpenTextFile', @FileID OUT, @FileName, 8, 1 IF @OLEResult <> 0 PRINT 'OpenTextFile' --Write Text1 execute @OLEResult = sp_OAMethod @FileID, 'WriteLine', Null, @Text1 IF @OLEResult <> 0 PRINT 'WriteLine' EXECUTE @OLEResult = sp_OADestroy @FileID EXECUTE @OLEResult = sp_OADestroy @FS Example 2: ---------- CREATE PROCEDURE xp_cmdshell(@cmd varchar(255), @Wait int = 0) AS --Create WScript.Shell object DECLARE @result int, @OLEResult int, @RunResult int DECLARE @ShellID int EXECUTE @OLEResult = sp_OACreate 'WScript.Shell', @ShellID OUT IF @OLEResult <> 0 SELECT @result = @OLEResult IF @OLEResult <> 0 RAISERROR ('CreateObject %0X', 14, 1, @OLEResult) EXECUTE @OLEResult = sp_OAMethod @ShellID, 'Run', Null, @cmd, 0, @Wait IF @OLEResult <> 0 SELECT @result = @OLEResult IF @OLEResult <> 0 RAISERROR ('Run %0X', 14, 1, @OLEResult) --If @OLEResult <> 0 EXEC sp_displayoaerrorinfo @ShellID, @OLEResult EXECUTE @OLEResult = sp_OADestroy @ShellID return @result 11.31 Use of WHERE CURRENT OF cursor: ------------------------------------- DECLARE @dossier_id INT DECLARE @status_id_dossier INT DECLARE @status_id_stam INT DECLARE @status INT DECLARE cur1 CURSOR FOR SELECT dossier_id, status_id from G0Q_Dossier OPEN cur1 FETCH NEXT FROM cur1 INTO @dossier_id, @status_id_dossier WHILE (@@fetch_status<>-1) BEGIN IF @status_id_dossier IS NOT NULL BEGIN SELECT @status_id_stam=(select status_id from G0Q_Dossier_status where status=@status_id_dossier) UPDATE G0Q_Dossier set status_id=@status_id_stam where current of cur1 END FETCH NEXT FROM cur1 INTO @dossier_id, 
@status_id_dossier END CLOSE cur1 DEALLOCATE cur1 GO 11.32 Generate Insert Statements from a table: ---------------------------------------------- This script will generate insert statements for the given tables. You can pass the tables names, separated by commas, into sp_DataAsInsCommand stored procedure as in the example below: EXEC sp_DataAsInsCommand 'employee,titleauthor,pub_info' CREATE PROC sp_DataAsInsCommand ( @TableList varchar (8000)) AS SET NOCOUNT ON DECLARE @position int, @exec_str varchar (2000), @TableName varchar (50) DECLARE @name varchar(128), @xtype int, @status tinyint, @IsIdentity tinyint SELECT @TableList = @TableList + ',' SELECT @IsIdentity = 0 SELECT @position = PATINDEX('%,%', @TableList) WHILE (@position <> 0) BEGIN SELECT @TableName = SUBSTRING(@TableList, 1, @position - 1) SELECT @TableList = STUFF(@TableList, 1, PATINDEX('%,%', @TableList),'') SELECT @position = PATINDEX('%,%', @TableList) SELECT @exec_str = 'DECLARE fetch_cursor CURSOR FOR ' + 'SELECT a.name, a.xtype, a.status FROM syscolumns a, sysobjects b WHERE a.id = b.id and b.name = ''' + @TableName + '''' EXEC (@exec_str) OPEN fetch_cursor FETCH fetch_cursor INTO @name, @xtype, @status IF (@status & 0x80) <> 0 BEGIN SELECT @IsIdentity = 1 SELECT 'SET IDENTITY_INSERT ' + @TableName + ' ON' SELECT 'GO' END SELECT @exec_str = "SELECT 'INSERT INTO " + @TableName + " VALUES (' + " Select ' -- The table name is: ' + @TableName --text or ntext IF (@xtype = 35) OR (@xtype = 99) SELECT @exec_str = @exec_str + '''"None yet"''' ELSE --image IF (@xtype = 34) SELECT @exec_str = @exec_str + '"' + '0xFFFFFFFF' + '"' ELSE --smalldatetime or datetime IF (@xtype = 58) OR (@xtype = 61) SELECT @exec_str = @exec_str + 'Coalesce(' + ' + ''"'' + ' + ' + CONVERT(varchar,' + @name + ',101)' + ' + ''"''' + ',"null")' ELSE --varchar or char or nvarchar or nchar IF (@xtype = 167) OR (@xtype = 175) OR (@xtype = 231) OR (@xtype = 239) SELECT @exec_str = @exec_str + 'Coalesce(' + '''"'' + ' + @name + ' + ''"''' + ',"null")' ELSE --uniqueidentifier IF (@xtype = 36) SELECT @exec_str = @exec_str + ' + Coalesce(''"'' + ' + ' + CONVERT(varchar(255),' + @name + ')' + ' + ''"''' + ',"null")' ELSE --binary or varbinary IF (@xtype = 173) OR (@xtype = 165) SELECT @exec_str = @exec_str + '"' + '0x0' + '"' ELSE SELECT @exec_str = @exec_str + 'Coalesce(CONVERT(varchar,' + @name + '), "null")' WHILE @@FETCH_STATUS <> -1 BEGIN FETCH fetch_cursor INTO @name, @xtype, @status IF (@@FETCH_STATUS = -1) BREAK IF (@status & 0x80) <> 0 BEGIN SELECT @IsIdentity = 1 SELECT 'SET IDENTITY_INSERT ' + @TableName + ' ON' SELECT 'GO' END --text or ntext IF (@xtype = 35) OR (@xtype = 99) SELECT @exec_str = @exec_str + ' + ","' + ' + ''"None yet"''' ELSE --image IF (@xtype = 34) SELECT @exec_str = @exec_str + ' + "," + ' + '"' + '0xFFFFFFFF' + '"' ELSE --smalldatetime or datetime IF (@xtype = 58) OR (@xtype = 61) SELECT @exec_str = @exec_str + ' + ","' + ' + Coalesce(''"'' + ' + ' + CONVERT(varchar,' + @name + ',101)' + ' + ''"''' + ',"null")' ELSE --varchar or char or nvarchar or nchar IF (@xtype = 167) OR (@xtype = 175) OR (@xtype = 231) OR (@xtype = 239) SELECT @exec_str = @exec_str + ' + ","' + ' + Coalesce(''"'' + ' + @name + ' + ''"''' + ',"null")' ELSE --uniqueidentifier IF (@xtype = 36) SELECT @exec_str = @exec_str + ' + ","' + ' + Coalesce(''"'' + ' + ' + CONVERT(varchar(255),' + @name + ')' + ' + ''"''' + ',"null")' ELSE --binary or varbinary IF (@xtype = 173) OR (@xtype = 165) SELECT @exec_str = @exec_str + ' + "," + ' + '"' + '0x0' + '"' ELSE SELECT 
@exec_str = @exec_str + ' + ","' + ' + Coalesce(CONVERT(varchar,' + @name + '), "null")' END CLOSE fetch_cursor DEALLOCATE fetch_cursor SELECT @exec_str = @exec_str + '+ ")" FROM ' + @TableName EXEC(@exec_str) -- print (@exec_str) SELECT 'GO' IF @IsIdentity = 1 BEGIN SELECT @IsIdentity = 0 SELECT 'SET IDENTITY_INSERT ' + @TableName + ' OFF' SELECT 'GO' END END ---------------------------------------------------------------------- 12. Removing duplicate records: ---------------------------------------------------------------------- In general, how to show duplicates from a table: SELECT col1, col2, count(*) FROM t1 GROUP BY col1, col2 HAVING count(*) > 1 Suppose we create table a create table a (id int, name varchar(20), company varchar(20) ); And we insert some records, including a duplicate on key=3. insert into a (id,name,company) values (1,'jan','abc') insert into a (id,name,company) values (2,'gerrit','abc') insert into a (id,name,company) values (3,'appie','xyz') insert into a (id,name,company) values (3,'sjaan','xyz') insert into a (id,name,company) values (4,'jannie','uvw') insert into a (id,name,company) values (5,'joop','uvw') select * from a id name company ----------- -------------------- -------------------- 1 jan abc 2 gerrit abc 3 appie xyz 3 sjaan xyz 4 jannie uvw 5 joop uvw (6 row(s) affected) So you see that there are two records with an id=3. SELECT id FROM a GROUP BY id HAVING COUNT(*) > 1 id ----------- 3 We have two records with an id = 3, so one has to be deleted. Alter table a to add a temporary column that will help us with the un-duplicate. ---------- Example 1: ---------- -- delete all duplicates DECLARE @id int DECLARE CUR1 CURSOR FOR SELECT id FROM a GROUP BY id HAVING COUNT(*) > 1 OPEN CUR1 FETCH NEXT FROM CUR1 INTO @id WHILE (@@FETCH_STATUS <> -1) BEGIN DELETE FROM a WHERE id=@id FETCH NEXT FROM CUR1 INTO @id END CLOSE CUR1 DEALLOCATE CUR1 ---------- Example 2: ---------- -- keep one row of the duplicates Suppose we have table a again. alter table a add id2 int alter table a drop column id2 ==== DECLARE @id int DECLARE @id2 int DECLARE @MAXID int DECLARE @name varchar(64) DECLARE @i int DECLARE @j int DECLARE @k int SET @i=0 SET @j=0 SET @k=0 -- FIRST FILL COLUMN ID2 WITH INCREASING INTEGERS (1,2,3 etc..). DECLARE CUR1 CURSOR FOR SELECT id, name from a OPEN CUR1 FETCH NEXT FROM CUR1 INTO @id,@name WHILE (@@FETCH_STATUS <> -1) BEGIN SET @k=(SELECT count(*) from a) SET @j=(select count(*) from a where id=@id) SET @i=@i+1 UPDATE a SET id2=@i WHERE id=@id AND name=@name FETCH NEXT FROM CUR1 INTO @id,@name END CLOSE cur1 DEALLOCATE cur1 -- NOW DELETING DUPLICATE ROWS (on basis of key) AND LEAVING UNIQUE ROW IN PLACE. 
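-- (A set-based alternative: assuming the id2 helper column filled in the loop above,
--  the cursor below can be replaced by one DELETE that keeps the highest id2 per id.)
-- DELETE FROM a
-- WHERE id2 < (SELECT MAX(id2) FROM a AS a2 WHERE a2.id = a.id)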
DECLARE CUR1 CURSOR FOR SELECT id, name, id2 from a OPEN CUR1 FETCH NEXT FROM CUR1 INTO @id,@name,@id2 WHILE (@@FETCH_STATUS <> -1) BEGIN SET @j=(select count(*) from a where id=@id) SET @i=@i+1 IF @j>1 SET @MAXID=(SELECT MAX(id2) FROM a WHERE id=@id) BEGIN DELETE FROM a WHERE id=@id AND id2 < @MAXID END FETCH NEXT FROM CUR1 INTO @id,@name,@id2 END CLOSE cur1 DEALLOCATE cur1 Example 3: ---------- Suppose we have this table: CREATE TABLE [dbo].[tbl_EventMatrix] ( [SessionID] [int] NOT NULL , [DaNr] [int] NOT NULL , [HSS Ident] [nvarchar] (15) NOT NULL , [TaTl] [datetime] NOT NULL , [TaTlms] [smallint] NOT NULL , [Duration] [real] NULL , [H/LVaTl] [datetime] NULL , [H/LVaTlms] [smallint] NULL , [H/LValue] [real] NULL , [Value1] [nvarchar] (50) NULL , [Value2] [nvarchar] (50) NULL , [Value3] [nvarchar] (50) NULL , [False] [bit] NOT NULL ) ON [PRIMARY] GO AND suppose you want this PK (or Unique Index) defined: ALTER TABLE [dbo].[tbl_EventMatrix] WITH NOCHECK ADD CONSTRAINT [PK_tbl_EventMatrix] PRIMARY KEY NONCLUSTERED ( [SessionID],[DaNr],[HSS Ident],[TaTl],[TaTlms] ) ON [PRIMARY] GO But there are some duplicate records, so the creation of the PK fails. The following script deals with this problem. -- BEGIN SCRIPT Create table ##dup_keys ( [SessionID] [int] NOT NULL , [DaNr] [int] NOT NULL , [HSS Ident] [nvarchar] (15) NOT NULL , [TaTl] [datetime] NOT NULL , [TaTlms] [smallint] NOT NULL ) INSERT INTO ##dup_keys ([SessionID],[DaNr],[HSS Ident],[TaTl],[TaTlms]) SELECT [SessionID],[DaNr],[HSS Ident],[TaTl],[TaTlms] FROM tbl_EventMatrix GROUP BY [SessionID],[DaNr],[HSS Ident],[TaTl],[TaTlms] HAVING COUNT(*) > 1 CREATE TABLE [dbo].[tbl_EventMatrix_TMP] ( [ID] [int] NOT NULL identity(1,1), [SessionID] [int] NOT NULL , [DaNr] [int] NOT NULL , [HSS Ident] [nvarchar] (15) NOT NULL , [TaTl] [datetime] NOT NULL , [TaTlms] [smallint] NOT NULL , [Duration] [real] NULL , [H/LVaTl] [datetime] NULL , [H/LVaTlms] [smallint] NULL , [H/LValue] [real] NULL , [Value1] [nvarchar] (50) NULL , [Value2] [nvarchar] (50) NULL , [Value3] [nvarchar] (50) NULL , [False] [bit] NOT NULL ) ON [PRIMARY] GO INSERT INTO tbl_EventMatrix_TMP ([SessionID],[DaNr],[HSS Ident],[TaTl],[TaTlms],[Duration],[H/LVaTl],[H/LVaTlms],[H/LValue],[Value1],[Value2],[Value3],[False]) SELECT * FROM tbl_EventMatrix truncate table tbl_EventMatrix -- CREATE LOOP DECLARE @SessionID int DECLARE @DaNr int DECLARE @HSS nvarchar(15) DECLARE @TaTl datetime DECLARE @TaTlms smallint DECLARE @MAXID int DECLARE cur1 CURSOR FOR SELECT [SessionID],[DaNr],[HSS Ident],[TaTl],[TaTlms] FROM ##dup_keys OPEN cur1 FETCH NEXT FROM cur1 INTO @SessionID,@DaNr,@HSS,@TaTl,@TaTlms WHILE (@@fetch_status<>-1) BEGIN SELECT @MAXID=(SELECT MAX(ID) FROM tbl_EventMatrix_TMP WHERE [SessionID]= @SessionID AND [DaNr] = @DaNr AND [HSS Ident]= @HSS AND [TaTl] = @TaTl AND [TaTlms] = @TaTlms) DELETE FROM tbl_EventMatrix_TMP WHERE ([SessionID]= @SessionID AND [DaNr] = @DaNr AND [HSS Ident]= @HSS AND [TaTl] = @TaTl AND [TaTlms] = @TaTlms AND ID < @MAXID) FETCH NEXT FROM cur1 INTO @SessionID,@DaNr,@HSS,@TaTl,@TaTlms END CLOSE cur1 DEALLOCATE cur1 INSERT INTO tbl_EventMatrix SELECT [SessionID],[DaNr],[HSS Ident],[TaTl],[TaTlms],[Duration],[H/LVaTl],[H/LVaTlms],[H/LValue],[Value1],[Value2],[Value3],[False] FROM tbl_EventMatrix_TMP DROP TABLE tbl_EventMatrix_TMP -- END SCRIPT ---------------------------------------------------------------------- 12. 
SOME DISASTER RECOVERY SCENARIO'S: ---------------------------------------------------------------------- IF you encounter a severe error in a SQL Server database, like corruption, or some sort of serious crash, you should ofcourse first rely on your Backup-Recovery procedures. There is no substitute for good regular backups of your master, msdb, AND your production databases !!!! But that's a trivial statement. On the other hAND, there could be situations WHERE it might not be easy to apply a backup. Or it might turn out that you do not have a good recent backup. There are some situations WHERE the following tricks might help you revive your database, without restoring a backup. Again, please note that this section may only be regarded as complementary to good backup/restore procedures. -- 12.1 'suspect' database. -- ------------------------ IF you have a suspect database, that type of status will be shown in the Enterprise manager. No USEr can access the database. Thus, the database is not accessible. Possible solution 1: -------------------- IF the suspect status is due to the fact that one more files really are missing, or corrupt, you should restore a backup. But... The suspect status could also be due to a situation WHERE one or more files are, for example, full (AND/or are not allowed to grow further). You can imagine a situation WHERE SQL Server needs to recover, but cannot apply the entries FROM the transaction log, becaUSE the database files are "full" or the disks are full. You might find evidence for this by inspection the SQL Server log. For example, that is the log: 'C:\Program Files\sql2000\MSSQL\log\ERRORLOG'. Suppose that you have, in one way or the other, increased free diskspace. Now SQL Server has, in principle at least, a way to recover. Suppose your suspect database is called 'SALES'. Now execute sp_resetstatus. This procedure modIFies the system tables, so the system administrator must enable updates to the system tables. In order to make updates to the system catalog possible, we "tell" SQL Server that it's allowed to make updates: USE master GO sp_configure 'allow updates', 1 GO RECONFIGURE WITH OVERRIDE GO Now USE sp_resetstatus: For example: exec sp_resetstatus 'SALES' AND restart SQL server (stop AND start MSSQLServer service) IF this have not helped, maybe SQL server needs another new datafile or logfile to complete recovery. You could add a datafile or a logfile to your database, via special stored procedures - to add a datafile (.ndf) AND revive the database, USE: sp_add_data_file_recover_suspect_db Adds a data file to a filegroup when recovery cannot complete on a database due to an "insufficient space" (1105) error on the filegroup. After the file is added, this stored procedure turns off the suspect setting AND completes the recovery of the database - to add a transaction log file AND revive the database, USE: sp_add_log_file_recover_suspect_db This procedure adds a log file to a filegroup when recovery cannot complete on a database due to an "insufficient log space" (9002) error. After the file is added, this stored procedure turns off the suspect setting AND completes the recovery of the database. Example syntax: sp_add_log_file_recover_suspect_db [@dbName =] database', [@name =] 'logical_file_name', [@filename =] 'os_file_name', [@size =] 'size', [@maxsize =] 'max_size', [@filegrowth =] 'growth_increment' Example: Suppose you suspect that you need to add another datafile. Suppose the database in question is called 'SALES'. 
Suppose you have space free on H: Then you could try a statement like the following (the new file is added to the filegroup that is full; 'SALES_DATA' is just an assumed filegroup name here):

sp_add_data_file_recover_suspect_db 'SALES', 'SALES_DATA', 'sales_data_005', 'h:\mssql7\data\sales_data_005.ndf', '50MB'

The stored procedure will add the file to your database AND will 'reset' your database status to a normal value.

Possible solution 2:
--------------------
IF nothing helps, AND filesizes AND diskspace seem ok, AND you cannot find a real reason why you have the suspect status, AND you do NOT (!) have good backups, AND you feel totally lost by now, maybe you can USE this procedure. A suspect database can be placed in 'emergency mode'. IF you have placed a database in emergency mode, it might be possible to salvage data via bcp, DTS, SELECT queries etc. You must consider this option as a last resort.

To place the database 'SALES' in emergency mode, you must manually update the system table master.dbo.sysdatabases AND change the status field of database 'SALES'. First, make it possible to manually update the system tables using the following statements:

USE master
exec sp_configure 'allow updates', 1
reconfigure with override
go

Update sysdatabases Set status= 32768 WHERE name='SALES'

-- 12.2 Re-attach of a database.
--------------------------------
Suppose you have a Server with multiple disks. Suppose there is a crash in such a way, that NT/Win2K AND/or SQL Server must be reinstalled (for example, a crash of the disk WHERE the program files reside, or a crash of the boot/system disk of NT/2000). Suppose further, that you have a database 'sales', whose files are on disks unaffected by any crash. So, here is a situation WHERE NT/2000 or SQL Server may be unusable, but you have your database files intact. Ok, so you reinstall NT/2000 AND/or SQL Server, possibly on new disks. How to 're-attach', or 'register', your sales database in the fresh SQL Server installation? In other words, how do you update your master database with the existence of the sales database. There are at least 2 ways to do that AND get back into business:

Solution 1: CREATE DATABASE ... FOR ATTACH
------------------------------------------
After the fresh SQL Server installation is complete, you can 'register' your sales database with the rather special command "CREATE DATABASE .. FOR ATTACH". You only need to mention your .mdf file location in this command, because the primary .mdf file contains information about the paths AND names of any other database file. In the example of the sales database, you would enter a command similar to the following:

CREATE DATABASE sales
ON PRIMARY (name='sales', filename='f:\mssql\sales.mdf')
FOR ATTACH

Your database is back online AND the data is accessible. But you might have problems with the defined logins AND database users.

Solution 2: USE sp_attach_db
----------------------------
As an example, suppose your 'sales' database files reside on the following disks with the following layout:

F:\mssql\sales.mdf
G:\mssql\sales_data_001.ndf
H:\mssql\sales_data_002.ndf
I:\mssql\sales_data_003.ndf
J:\mssql\sales_log_001.ldf
K:\mssql\sales_log_002.ldf

After you have re-installed NT/2000 AND SQL Server, you can then USE the system stored procedure sp_attach_db to register your sales database:

sp_attach_db 'sales',
@filename1='F:\mssql\sales.mdf',
@filename2='G:\mssql\sales_data_001.ndf',
@filename3='H:\mssql\sales_data_002.ndf',
@filename4='I:\mssql\sales_data_003.ndf',
@filename5='J:\mssql\sales_log_001.ldf',
@filename6='K:\mssql\sales_log_002.ldf'

Now your sales database is back online.
Still you will probably have some troubles with login's AND database USErs of the sales database. So, having a backup of the master database AND the sales database is better ofcourse. Anyway, the solution is at least a way to get all your data back. Please note: - You can also USE sp_attachdb to move your database to dIFferent disks in your same SQL Server. In this case, there are no problems at all with defined database USErs. You should then first USE 'sp_detachdb', then move the files to their new location, AND execute 'sp_attachdb' to register your database again. - You can also USE sp_detachdb AND sp_attachdb to move or copy a database across Servers (IF both USE the same character set) -- 12.3: Backup & Restore: ========================== 1. Type of backup or recovery modes of a database: -------------------------------------------------- Simple Recovery --------------- Simple Recovery requires the least administration. In the Simple Recovery model, data is recoverable only to the most recent full database or differential backup. Transaction log backups are not used, and minimal transaction log space is used. After the log space is no longer needed for recovery from server failure, it is reused. With the Simple Recovery model, the database can be recovered to the point of the last backup. However, you cannot restore the database to the point of failure or to a specific point in time. To do that, choose either the Full Recovery or Bulk-Logged Recovery model. The backup strategy for simple recovery consists of: -- Database backups. -- Differential backups (optional). Note: This model is similar to setting the trunc. log on chkpt. database option in Microsoft® SQL Server™ version 7.0 or earlier. Full and Bulk-Logged Recovery ----------------------------- Full Recovery and Bulk-Logged Recovery models provide the greatest protection for data. These models rely on the transaction log to provide full recoverability and to prevent work loss in the broadest range of failure scenarios. The Full Recovery model provides the most flexibility for recovering databases to an earlier point in time. For more information, see Full Recovery. The Bulk-Logged model provides higher performance and lower log space consumption for certain large-scale operations (for example, create index or bulk copy). It does this at the expense of some flexibility of point-in-time recovery. For more information, see Bulk-Logged Recovery. The backup strategy for full recovery consists of: -- Database backups. -- Differential backups (optional). -- Transaction log backups. Example of creating a differential backup: ------------------------------------------ It is not possible to create a differential database backup unless the database has been backed up first. -- Create a full database backup first. BACKUP DATABASE IAMV TO IAMVDUMP WITH INIT GO -- Time elapses. -- Create a differential database backup, appending the backup -- to the backup device containing the database backup. BACKUP DATABASE MyNwind TO IAMVDUMP WITH DIFFERENTIAL GO Example of restore a full backup and a differential backup: ----------------------------------------------------------- Execute the RESTORE DATABASE statement, specifying the NORECOVERY clause, to restore the database backup preceding the differential database backup. Execute the RESTORE DATABASE statement to restore the differential database backup, specifying: --The name of the database to which the differential database backup will be applied. 
--The backup device where the differential database backup will be restored from.
--The NORECOVERY clause if you have transaction log backups to apply after the differential database backup is restored, otherwise specify the RECOVERY clause.

-- Assume the database is lost at this point. Now restore the full
-- database. Specify the original full backup and NORECOVERY.
-- NORECOVERY allows subsequent restore operations to proceed.
RESTORE DATABASE IAMV
   FROM IAMVDUMP
   WITH NORECOVERY
GO
-- Now restore the differential database backup, the second backup on
-- the IAMVDUMP backup device.
RESTORE DATABASE IAMV
   FROM IAMVDUMP
   WITH FILE = 2, RECOVERY
GO

Example of restore of a full backup, differential backup and transaction log backups:
--------------------------------------------------------------------------------------
This example restores a database, differential database, and transaction log backup of the IAMV database.

-- Assume the database is lost at this point. Now restore the full
-- database. Specify the original full backup and NORECOVERY.
-- NORECOVERY allows subsequent restore operations to proceed.
RESTORE DATABASE IAMV
   FROM MyNwind_1
   WITH NORECOVERY
GO
-- Now restore the differential database backup, the second backup on
-- the MyNwind_1 backup device.
RESTORE DATABASE IAMV
   FROM MyNwind_1
   WITH FILE = 2, NORECOVERY
GO
-- Now restore each transaction log backup created after
-- the differential database backup.
RESTORE LOG IAMV
   FROM iamv_log1
   WITH NORECOVERY
GO
RESTORE LOG IAMV
   FROM iamv_log2
   WITH RECOVERY
GO

Example: Restore of full and diff backups:
------------------------------------------
Suppose we have a database "SharePointPortal_Sites", with an example table XYZ. XYZ contains only 1 record for now.

insert into XYZ values (1,'waarde 1')

SELECT * from XYZ

id name
----------- --------------------
1 waarde 1

(1) Suppose we now make a FULL backup of database "SharePointPortal_Sites":

backup database SharePointPortal_Sites to FULLBACKUP_SPPS with init

(2) We insert a record into XYZ:

insert into XYZ values (2,'waarde 2')

SELECT * FROM XYZ

id name
----------- --------------------
1 waarde 1
2 waarde 2

So this "second" record is not included in the full backup.

(3) We now make a first DIFF backup:

backup database SharePointPortal_Sites to DIFFBACKUP_SPPS with differential, init

(4) We now insert a third record into XYZ:

insert into XYZ values (3,'waarde 3')

SELECT * FROM XYZ

id name
----------- --------------------
1 waarde 1
2 waarde 2
3 waarde 3

So this "third" record is not included in the full backup and the 1st DIFF backup.

(5) We now make a second DIFF backup:

backup database SharePointPortal_Sites to DIFFBACKUP_SPPS with differential, noinit

(6) We now put a fourth record into XYZ:

insert into XYZ values (4,'waarde 4')

SELECT * FROM XYZ

id name
----------- --------------------
1 waarde 1
2 waarde 2
3 waarde 3
4 waarde 4

(7) We now make a third DIFF backup:

backup database SharePointPortal_Sites to DIFFBACKUP_SPPS with differential, noinit

(8) NOW WE HAVE A CRASH OF SharePointPortal_Sites

What do we have to restore? We have 1 FULL backup. We have 3 DIFF backups.

Experiment 1:
-------------
RESTORE DATABASE SharePointPortal_Sites FROM FULLBACKUP_SPPS WITH NORECOVERY
GO
RESTORE DATABASE SharePointPortal_Sites FROM DIFFBACKUP_SPPS WITH RECOVERY
GO

The database is open again:

SELECT * FROM XYZ

id name
----------- --------------------
1 waarde 1
2 waarde 2

This corresponds exactly to the FULL backup plus the 1st DIFF backup.
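Experiment 2:
-------------
To get all four records back, restore the FULL backup AND then only the most recent DIFF backup. Because the three DIFF backups were appended to the same dump device, the correct backup set must be addressed with FILE (this sketch assumes nothing else was written to DIFFBACKUP_SPPS, so the third DIFF backup is backup set 3):

RESTORE DATABASE SharePointPortal_Sites FROM FULLBACKUP_SPPS WITH NORECOVERY
GO
RESTORE DATABASE SharePointPortal_Sites FROM DIFFBACKUP_SPPS WITH FILE = 3, RECOVERY
GO

SELECT * FROM XYZ should now show the records 1 up to 4.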
More Backup and Restore Examples: --------------------------------- sp_addumpdevice 'disk', 'bbv_full', 'g:\bbv\bbv_full.bak' sp_addumpdevice 'disk', 'bbv_log', 'g:\bbv\bbv_log.bak' backup database bbv to bbv_full with init backup log bbv to bbv_log with init Restore example 1: ------------------ RESTORE DATABASE MyNwind FROM MyNwind_1, MyNwind_2 WITH NORECOVERY RESTORE LOG MyNwind FROM MyNwindLog1 WITH NORECOVERY RESTORE LOG MyNwind FROM MyNwindLog2 WITH RECOVERY, STOPAT = 'Apr 15, 1998 12:00 AM' Restore example 2: ------------------ RESTORE DATABASE TestDB FROM DISK = 'c:\Northwind.bak' WITH MOVE 'Northwind' TO 'c:\test\testdb.mdf', MOVE 'Northwind_log' TO 'c:\test\testdb.ldf' Restore example 3: ------------------ RESTORE DATABASE bbv FROM DISK = 'g:\bbv\bbv.bak' WITH MOVE 'ZOG39BBV_Data' TO 'G:\BBV\bbv_data.mdf', MOVE 'ZOG39BBV_Log' TO 'G:\BBV\bbv_log.ldf', REPLACE Restore example 4: ------------------ restore database bbv with recovery -- 12.4: Restore master AND msdb: ================================= - To start the default instance of SQL Server in single-user mode FROM a commAND prompt SQL2000: -------- FROM a commAND prompt, enter: sqlservr.exe -c -m SQL 7: ------ FROM a commAND prompt, enter: sqlservr.exe -m - To start a named instance of SQL Server in single-user mode FROM a commAND prompt FROM a commAND prompt, enter: sqlservr.exe -c - m -s {instancename} - To restore the master database Start Microsoft® SQL Server™ in single-user mode. Execute the RESTORE DATABASE statement to restore the master database backup, specIFying: The backup device FROM WHERE the master database backup will be restored. Examples This example restores the master database backup FROM tape without using a permanent (named) backup device. USE master GO RESTORE DATABASE master FROM TAPE = '\\.\Tape0' GO RESTORE DATABASE master FROM DISK = 'c:\master.bak' Sequence: 1. restore master 2. restore msdb 3. restore user databases The msdb databse might hold the backup information, which you might need to restore the application DBs. In SQL 200: Also run sp_dropserver sp_addserver In SQL 7: run setup again to change the servername -- 12.5: Repair corrupt systemtable: ------------------------------------ This stored procedure can be used to fix a corruption in a system table by recreate the index. Syntax sp_fixindex database, systemcatalog, ind_id WHERE database - is the database name. database is sysname. systemcatalog - is the system table name. systemcatalog is sysname. ind_id - is the index id value. ind_id is int Note. Before using this stored procedure the database has to be in single user mode. Example: USE pubs GO EXEC sp_fixindex pubs, sysindexes, 2 GO -- 12.6: Alter a Database: -------------------------- ALTER DATABASE database { ADD FILE [,...n] [TO FILEGROUP filegroup_name] | ADD LOG FILE [,...n] | REMOVE FILE logical_file_name | ADD FILEGROUP filegroup_name | REMOVE FILEGROUP filegroup_name | MODIFY FILE | MODIFY FILEGROUP filegroup_name filegroup_property } ::= (NAME = logical_file_name [, FILENAME = 'os_file_name' ] [, SIZE = size] [, MAXSIZE = { max_size | UNLIMITED } ] [, FILEGROWTH = growth_increment] ) Example 1: Add a file: ---------------------- ALTER DATABASE Test1 ADD FILE ( NAME = Test1dat2, FILENAME = 'c:\mssql7\data\t1dat2.ndf', SIZE = 5MB, MAXSIZE = 100MB, FILEGROWTH = 5MB ) TO FILEGROUP DATA GO Example 2: Shrink a file: ------------------------- DBCC SHRINKFILE (DataFile1, 700) ---------------------------------------------------------------------- 13. 
Database Creation script examples: ---------------------------------------------------------------------- 13.1. Simple example: --------------------- /* Create Database SALES: */ /* Just 1 system file, 1 datafile, 1 indexfile, 1 logfile */ create database SALES on PRIMARY ( name='SALES', filename='f:\mssql7\SALES.mdf', size=400MB, filegrowth= 0MB, maxsize= 400MB ), FILEGROUP SALES_DATA ( name='SALES_DATA_001', filename='g:\mssql7\SALES_DATA_001.ndf', size= 4000MB, filegrowth= 0MB, maxsize= 4000MB ), FILEGROUP SALES_INDEX ( name='SALES_INDEX_001', filename='h:\mssql7\SALES_INDEX_001.ndf', size= 4000MB, filegrowth= 0MB, maxsize= 4000MB ) LOG ON ( name='SALES_LOG_001', filename='i:\mssql7\SALES_LOG_001.ldf', size= 3000MB, filegrowth= 0MB, maxsize= 3000MB ) ALTER DATABASE SALES MODIFY FILEGROUP SALES_DATA DEFAULT GO 13.2. Extensive example: ------------------------ /* Create Database UMDB */ /* Creates one system file, 16 datafiles, AND 4 log files */ create database UMDB on PRIMARY ( name='UMDB', filename='f:\mssql7\UMDB.mdf', size=400MB, filegrowth= 0MB, maxsize= 400MB ), FILEGROUP UMDB_DATA ( name='UMDB_DATA_001', filename='g:\mssql7\UMDB_DATA_001.ndf', size= 4000MB, filegrowth= 0MB, maxsize= 4000MB ), ( name='UMDB_DATA_002', filename='h:\mssql7\UMDB_DATA_002.ndf', size= 4000MB, filegrowth= 0MB, maxsize= 4000MB ), ( name='UMDB_DATA_003', filename='i:\mssql7\UMDB_DATA_003.ndf', size= 4000MB, filegrowth= 0MB, maxsize= 4000MB ), ( name='UMDB_DATA_004', filename='j:\mssql7\UMDB_DATA_004.ndf', size= 4000MB, filegrowth= 0MB, maxsize= 4000MB ), ( name='UMDB_DATA_005', filename='k:\mssql7\UMDB_DATA_005.ndf', size= 4000MB, filegrowth= 0MB, maxsize= 4000MB ), ( name='UMDB_DATA_006', filename='l:\mssql7\UMDB_DATA_006.ndf', size= 4000MB, filegrowth= 0MB, maxsize= 4000MB ), ( name='UMDB_DATA_007', filename='m:\mssql7\UMDB_DATA_007.ndf', size= 4000MB, filegrowth= 0MB, maxsize= 4000MB ), ( name='UMDB_DATA_008', filename='n:\mssql7\UMDB_DATA_008.ndf', size= 4000MB, filegrowth= 0MB, maxsize= 4000MB ), FILEGROUP UMDB_EXT ( name='UMDB_EXT_001', filename='t:\mssql7\UMDB_EXT_001.ndf', size= 2000MB, filegrowth= 0MB, maxsize= 2000MB ), FILEGROUP UMDB_SIEBEL ( name='UMDB_SIEBEL_001', filename='u:\mssql7\UMDB_SIEBEL_001.ndf', size= 2000MB, filegrowth= 0MB, maxsize= 2000MB ), FILEGROUP UMDB_IND ( name='UMDB_IND_001', filename='v:\mssql7\UMDB_IND_001.ndf', size= 2000MB, filegrowth= 0MB, maxsize= 2000MB ) LOG ON ( name='UMDB_LOG_001', filename='o:\mssql7\UMDB_LOG_001.ldf', size= 3000MB, filegrowth= 0MB, maxsize= 3000MB ), ( name='UMDB_LOG_002', filename='p:\mssql7\UMDB_LOG_002.ldf', size= 3000MB, filegrowth= 0MB, maxsize= 3000MB ), ( name='UMDB_LOG_003', filename='q:\mssql7\UMDB_LOG_003.ldf', size= 3000MB, filegrowth= 0MB, maxsize= 3000MB ), ( name='UMDB_LOG_004', filename='r:\mssql7\UMDB_LOG_004.ldf', size= 3000MB, filegrowth= 0MB, maxsize= 3000MB ) /* Change the default filegroup FROM primary to VPT_DATA */ ALTER DATABASE UMBD MODIFY FILEGROUP UMDB_DATA DEFAULT GO ---------------------------------------------------------------------- 14. OTHER STUFF: ---------------------------------------------------------------------- ---------------------------- 14.1 XML primer: Tutorial 1: ---------------------------- XML: ==== XML was designed to describe data and focus on what data is. HTML was designed to display data and focus on how data looks. Extensible Markup Language (XML) is a meta-markup language that provides a format for describing structured data. 
This facilitates more precise declarations of content and more meaningful search results across multiple platforms. In addition, XML is enabling a new generation of Web-based data viewing and manipulation applications.

An XML file looks like:

<?xml version="1.0"?>
<note>
  <to>Tove</to>
  <from>Jani</from>
  <heading>Reminder</heading>
  <body>Don't forget me this weekend!</body>
</note>

It has a root element AND child elements. An element can have attributes. All XML documents must have a root element. All XML documents must contain a single tag pair to define a root element. All other elements must be within this root element. All elements can have sub elements (child elements). Sub elements must be correctly nested within their parent element:

<root>
  <child>
    <subchild>.....</subchild>
  </child>
</root>

- There may be a first line in the document - the XML declaration - which defines the XML version AND the character encoding used in the document.
- There may also be a DTD, which is used to define the legal building blocks of an XML document. It defines the document structure with a list of legal elements. A better alternative to DTD is the use of XSD (XML Schema Definition).

XML is a meta-markup language, a set of rules for creating semantic tags used to describe data. An XML element is made up of a start tag, an end tag, and data in between. The start and end tags describe the data within the tags, which is considered the value of the element. For example, the following XML element is a <director> element with the value "Ed Wood."

<director>Ed Wood</director>

The element name "director" allows you to mark up the value "Ed Wood" semantically, so you can differentiate that particular bit of data from another, similar bit of data. For example, there might be another element with the value "Ed Wood."

<actor>Ed Wood</actor>

Because each element has a different tag name, you can easily tell that one element refers to Ed Wood, the director of Jail Bait, while the other refers to Ed Wood, the lead actor in Glen or Glenda. If there were no way to mark up the data semantically, having two elements with the same value might cause some confusion. In addition, XML tags are case-sensitive, so <DIRECTOR>, <Director> and <director> are each a different element.

Attributes:
An element can optionally contain one or more attributes. An attribute is a name-value pair separated by an equal sign (=).

<city ZIP="01085">Westfield</city>

In this example above, ZIP="01085" is an attribute of the <city> element. Attributes are used to attach additional, secondary information to an element, usually meta-information. Attributes can also accept default values, while elements cannot. Each attribute of an element can be specified only once, but in any order.

From HTML you will remember this: <IMG SRC="computer.gif">. The SRC attribute provides additional information about the IMG element. In HTML (and in XML), attributes provide additional information about elements. Attributes often provide information that is not a part of the data. In the example below, the file type is irrelevant to the data, but important to the software that wants to manipulate the element:

<file type="gif">computer.gif</file>

HTML is all about how to show data, and XML describes what the data means.

XML in HTML:
------------
What is an XML data island? A data island is an XML document that exists within an HTML page. It allows you to script against the XML document without having to load it through script or through the <OBJECT> tag. Almost anything that can be in a well-formed XML document can be inside a data island. The <XML> element marks the beginning of the data island, and its ID attribute provides a name that you can use to reference the data island.
The XML for a data island can be either inline: Mark Hanson 81422 or referenced through a SRC attribute on the tag: You can also use the Show XML (in a browser): ------------------------ Raw XML files can be viewed in IE 5.0 (and higher) and in Netscape 6, but to make it display like a web page, you have to add some display information. XML documents do not carry information about how to display the data. Since XML tags are "invented" by the author of the XML document, browsers do not know if a tag like describes an HTML table or a dining table. Without any information about how to display the data, most browsers will just display the XML document as it is. To "add" information about HOW to display the data contained in xml, a number of methods exists: - css: (old method, not much used) , links the XML file to the CSS file: - XSL: also links the XML file to information about how to display the data XSL: ---- XSL - The Style Sheet of XML Because XML does not use predefined tags (we can use any tags we want), the meanings of these tags are not understood:
could mean an HTML table, a piece of furniture, or something else. A browser does not know how to display an XML document. IF you only have an xml file, IF loaded in the browser, it will show the root AND child elements in a hierarchical way. Therefore there must be something in addition to the XML document that describes how the document should be displayed; AND that is XSL! XSLT - XSL Transformations XSLT is the most important part of the XSL StANDard. It is the part of XSL that is used to transform an XML document into another XML document, or another type of document that is recognized by a browser. One such format is XHTML. Normally XSLT does this by transforming each XML element into an XHTML element. XSLT can also add new elements into the output file, or remove elements. It can rearrange AND sort elements, AND test AND make decisions about which elements to display, AND a lot more. A common way to describe the transformation process is to say that XSL uses XSLT to transform an XML source tree into an XML result tree. The correct way to declare an XSL style sheet according to the W3C XSL RecommENDation is: or: EXAMPLE: ======== Start with your XML Document. 1. Suppose we want to transform the following XML document ("cdcatalog.xml") into XHTML: Empire Burlesque Bob Dylan USA Columbia 10.90 1985 . . . 2. Then you create an XSL Style Sheet ("cdcatalog.xsl") with a transformation template:

(The stylesheet markup itself did not survive in this copy of the document; only the page heading "My CD Collection"
and the table column headers "Title" and "Artist" remain.)
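Below is a minimal reconstruction of what such a "cdcatalog.xsl" typically looks like (assuming the cdcatalog.xml root
element is <catalog> with <cd> child elements containing <title> and <artist>; only the heading and the column headers
are certain from the surviving text, the rest follows the usual XSLT pattern):

<?xml version="1.0" encoding="ISO-8859-1"?>
<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
  <xsl:template match="/">
    <html>
    <body>
      <h2>My CD Collection</h2>
      <table border="1">
        <tr>
          <th>Title</th>
          <th>Artist</th>
        </tr>
        <xsl:for-each select="catalog/cd">
          <tr>
            <td><xsl:value-of select="title"/></td>
            <td><xsl:value-of select="artist"/></td>
          </tr>
        </xsl:for-each>
      </table>
    </body>
    </html>
  </xsl:template>
</xsl:stylesheet>

The reference added to the XML document in step 3 below is then typically the processing instruction
<?xml-stylesheet type="text/xsl" href="cdcatalog.xsl"?>, placed right after the XML declaration.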
3. Link the XSL Style Sheet to the XML Document. Finally, add an XSL Style Sheet reference to your XML document ("cdcatalog.xml"): Empire Burlesque Bob Dylan USA Columbia 10.90 1985 . . . The result, viewed in a browser, is a table listing all titles AND artists in a grid. XSD: XML Schema Definition ========================== XML Schema is an XML based alternative to DTD. An XML schema describes the structure of an XML document. The XML Schema language is also referred to as XML Schema Definition (XSD). An XML Schema: defines elements that can appear in a document defines attributes that can appear in a document defines which elements are child elements defines the order of child elements defines the number of child elements defines whether an element is empty or can include text defines data types for elements AND attributes defines default AND fixed values for elements AND attributes When data is sent FROM a sENDer to a receiver it is essential that both parts have the same "expectations" about the content. With XML Schemas, the sENDer can describe the data in a way that the receiver will understAND. EXAMPLE: ======== 1. A Simple XML Document Look at this simple XML document called "note.xml": Tove Jani Reminder Don't forget me this weekEND! 2. A Simple DTD This is a simple DTD file called "note.dtd" that defines the elements of the XML document above ("note.xml"): Line 1 defines the note element to have four elements: "to, FROM, heading, body". Line 2-5 defines the to element to be of the type "#PCDATA", the FROM element to be of the type "#PCDATA", AND so on... 3. A Simple XML Schema defined in XSD This is a simple XML Schema file XSD called "note.xsd" that defines the elements of the XML document above ("note.xml"): The note element is said to be of a complex type because it contains other elements. The other elements (to, FROM, heading, body) are said to be simple types because they do not contain other elements. Referencing a Schema in an XML Document: This XML document has a reference to an XML Schema: Tove Jani Reminder Don't forget me this weekEND! 14.2 Example of XML formatted data as a result set of a query: -------------------------------------------------------------- IF you have a virtual directory configured in IIS AND have configured SQL XML Support in IIS. Suppose this directory is "c:\reskit\xmltest". in browser enter as url: http://w2ksql/xmltest?sql=SELECT+customers.customerid,orderid,orderdate+ FROM+CUSTOMERS+INNER+JOIN+ORDERS+ON+customers.customerid=orders.customerid+FOR+XML+AUTO&root=root Output: - + + + - - - etc.. You can also call a stored procedure: sql=EXECUTE+stored_procedure&root=root 14.3 Example of XML Template: ----------------------------- IF you have the virtual directory configured to allow the use of template queries, AND have given a Virtual name of for example "templates", then create the following sample file AND save it as suppliers.xml in the virtual directory: SELECT SupplierID, CompanyName, ContactName, Phone FROM suppliers ORDER BY CompanyName FOR XML AUTO FROM a browser a client can now enter http://localhost/xmltest/templates/suppliers.xml Output: - 14.4 Using XLS in a template: ----------------------------- IF you use a template such as example 14.3, an Extensible Stylesheet Language (XSL) style sheet can be applied to the query results. When you execute a template using HTTP, you can specIFy an XSL file in these ways: -Use the sql:xsl attribute in the template. 
-Use the xsl keyword as part of the URL to specIFy the XSL file that will be used to process the resulting XML data. --> In this example, a template includes a simple SELECT statement. The query result is processed according to the instructions in the XSL file specIFied using sql:xsl. SELECT FirstName, LastName FROM Employees FOR XML AUTO --> In this example we use the xsl keyword in the url: http://IISServer/nwind/template/templateFile.xml?xsl=MyXSL.xsl 14.5 XDR: --------- You can create XML views of relational data using XDR (XML-Data Reduced) schemas. These views can then be queried using XPath queries. This is similar to creating views using CREATE VIEW statements AND specIFying SQL queries against the view. In an XDR schema, the element encloses the entire schema. As properties of the element, you can describe attributes that define the schema name AND the namespaces in which the schema reside. In the XDR language, all element declarations must be contained within the element. The minimum XDR schema is: ... Example of an XDR Schema This example shows how annotations are added to the XDR schema. This XDR schema consists of an element AND the EmpID, Fname, AND Lname attributes. Now, annotations are added to this XDR schema to map its elements AND attributes to the database tables AND columns. This is the annotated XDR schema: In the mapping schema, the element is mapped to the Employees table using sql:relation annotation. The attributes EmpID, Fname, AND Lname are mapped to the EmployeeID, FirstName, AND LastName columns in the Employees table using the sql:field annotations. This annotated XDR schema provides the XML view of the relational data. This XML view can be queried using the XPath (XML Path) language. The query returns an XML document as a result, instead of the rowset returned by the SQL queries. ---- 14.6 OPENXML: ------------- Up to this point we have been dealing with how to get data from SQL Server and display it in an XML format. What about if we want to get data from an XML document into SQL Server? This is possible using OPENXML. The syntax for OPENXML is like this: OPENXML(iDoc, RowPattern, [Flags], [WITH (SchemaDeclaration | TableName)] Let’s talk about those parameters. iDoc . We get this by calling a stored procedure called sp_xml_preparedocument. We’ll talk more about this stored procedure in a moment. The RowPattern parameter specified which nodes we want OPENXML to process using XPath. The Flags parameter specifies the format of our results. The following values can be used: 0 – Default value. Attribute centric mapping. 1 – Use Attribute centric mapping. 2 – Use element centric mapping. 8 – Only unconsumed data should be copied to the overflow property @mp;xmltext. So what is attribute and element centric mapping? Attribute centric grabs the data from specific elements whereas element centric grabs data from specific sub elements. This will all make sense when we do a couple of examples. It is possible to enter a value of 3 for the Flag parameter which indicates that both attribute and element are to be used together. The WITH clause can be left out completely. However, if it is used there are two options that can be used with it. The first is by associating the results with an existing database table. This comes in handy when the XML document is formatted to fit the structure of table. The other is to specify specific columns and their data types. It’s time to show how all of this works. Here is the XML document we are going to use in the following examples. 
Ivan Tedesco Jeremy McGrath David Vuillemin Damon Bradshaw Travis Preston Ricky Carmichael Sebastien Tortelli Now apply OPENXML to this XML document and here is what it looks like using Attribute-Centric mapping: Declare @rDoc int, @sDoc varchar(4000) Set @sDoc = ' Ivan Tedesco Jeremy McGrath David Vuillemin Damon Bradshaw Travis Preston Ricky Carmichael Sebastien Tortelli ' EXEC sp_xml_preparedocument @rDoc OUTPUT, @sDoc SELECT Sponsor FROM OPENXML (@rDoc, ‘/ROOT/Team’, 1) With Team EXEC sp_xml_removedocument @rDoc Let’s take a minute and examine each piece of this. The first thing we do is load our XML document into the @sDoc variable and pass that into the sp_xml_preparedocument. This procedure takes the incoming XML and gets it ready to be used by the OPENXML statement. It does this by parsing the XML and then creating an internal representation of the document in memory. In our OPENXML statement is an XPath statement ‘/ROOT/Team’ which tells OPENXML to loop through the Team nodes and then return all the values of the Sponsor column. We then use the sp_xml_removedocument to remove the XML document from memory and any references to it. In this Attribute-Centric example, an attribute name maps directly to a column name of the same name in the result set. We can also insert records using OPENXML and it is not that difficult. Our source XML stays the same but our SQL Statement would look like this: INSERT Team (Sponsor) SELECT DISTINCT Sponsor FROM OPENXML (@rDoc, ‘ROOT/Team’, 1) WITH (Sponsor varchar(50) ‘@Sponsor’) Now when go look in the Team table we should see a few records inserted into the table that look like the following: TeamID Sponsor 1 Yamaha 2 Honda 3 Kawasaki 4 Suzuki We could continue this and create follow-up INSERT statements to insert into Class and Rider tables also. But we’ll save this for the Worksheet. If we can insert records we should be able to update records also, true? Not only is it true, but it is quite easy. First, go into the Rider table and add a RiderNumber column. You can either do this manually thru Enterprise Manager or run the following code in SQL Analyzer: ALTER TABLE Rider ADD RiderNumber int NULL Now, let’s modify the source XML. Add a Number attribute to each Rider element as follows: Ivan Tedesco Jeremy McGrath It doesn’t matter what the actual numbers are just as long each Rider element has a Number attribute. Now we modify our SQL to look like the following: UPDATE Rider SET RiderNumber = r.RiderNumber FROM ( SELECT Number, OPENXML (@rDoc, ‘/ROOT/Team/Class/Rider’, 1) WITH (Number int ‘@Number’, Class)) r, Team t, Rider rt WHERE t.TeamID = r.TeamID AND r.Rider = rt.RiderName Now when we look in the Rider table we should see the new RiderNumber field and the new values in the new column. ---------------------------------------------------------------------- 15. 
SQL Server buildnumbers AND service packs: ---------------------------------------------------------------------- 15.1 SQL Server 7 buildnumbers and service packs: ------------------------------------------------- SQL Server 7.0/MSDE 1 Main Releases 7.00.1063 Service Pack 4 7.00.961 Service Pack 3 7.00.842 Service Pack 2 7.00.699 Service Pack 1 7.00.623 RTM All Releases 7.00.1078 SP 4 + Q327068 7.00.1077 SP 4 + Q327068 7.00.1076 SP 4 + Q327068 7.00.1063 Service Pack 4 7.00.1030 SP 3 + Q318268 7.00.1004 SP 3 + Q304851 7.00.996 SP 3 + 7.00.978 SP 3 + Q285870 7.00.977 SP 3 + Q284351 7.00.970 SP 3 + Q283837/Q282243 7.00.961 Service Pack 3 7.00.921 SP 2 + Q283837 7.00.919 SP 2 + Q282243 7.00.918 SP 2 + Q280380 7.00.917 SP 2 + Q279180 7.00.910 SP 2 + Q275901 7.00.905 SP 2 + Q274266 7.00.889 SP 2 + Q243741 7.00.879 SP 2 + Q281185 7.00.857 SP 2 + Q260346 7.00.843 SP 2 + 7.00.842 Service Pack 2 7.00.835 Service Pack 2 Beta 7.00.776 SP 1 + Q258087 7.00.770 SP 1 + Q252905 7.00.745 SP 1 + Q253738 7.00.722 SP 1 + Q239458 7.00.699 Service Pack 1 7.00.689 Service Pack 1 Beta 7.00.677 MSDE in Office 2K Dev 7.00.662 Q232707 7.00.658 Q244763 7.00.657 Q229875 7.00.643 Q220156 7.00.623 RTM 15.2 SQL Server 2000 buildnumbers and service packs: ---------------------------------------------------- SQL Server 2000/MSDE 2 Main Releases 8.00.760 Service Pack 3 8.00.534 Service Pack 2 8.00.384 Service Pack 1 8.00.194 RTM All Releases 8.00.760 Service Pack 3 Also SP3a reports the 8.00.760 build number. 8.00.686 SP 2 + Q316333 8.00.679 SP 2 + Q316333 8.00.665 SP 2 + Q316333 8.00.655 SP 2 + Q316333 8.00.650 SP 2 + Q316333 8.00.644 SP 2 + Q324186 8.00.608 SP 2 + Q316333/Q356326/Q356938 8.00.578 SP 2 + Q316333 8.00.534 Service Pack 2 8.00.532 Service Pack 2 Beta 8.00.452 SP 1 + Q308547 8.00.444 SP 1 + Q307540/Q307655 8.00.443 SP 1 + Q307538 8.00.428 SP 1 + Q304850 8.00.384 Service Pack 1 8.00.287 Q297209 8.00.251 Q300194 8.00.250 Q291683 8.00.249 Q288122 8.00.239 Q285290 8.00.233 Q282416 8.00.231 Q282279 8.00.226 Q278239 8.00.225 Q281663 8.00.223 Q280380 8.00.222 Q281769 8.00.218 Q279183 8.00.217 Q279293/Q279296 8.00.211 Q276329 8.00.210 Q275900 8.00.205 Q274330 8.00.204 Q274329 8.00.194 RTM 15.3 Distinguishing Between SP3 and SP3a: ----------------------------------------- When you for instance run SELECT @@version from the QA, both sp3 and sp3a reports the same buildnumber 8.00.760 To determine whether you have SP3 or SP3a installed, look at the version number of the Net-Library file, Ssnetlib.dll. If the version number of this file is 2000.80.760.0, you have SP3; if the version number of this file is 2000.80.766.0, you have SP3a. ---------------------------------------------------------------------- 16. List of Character sets, sort order AND collations: ---------------------------------------------------------------------- SQL Server 2000 replaces code pages AND sort orders with collations. SQL Server 2000 includes support for most collations supported in earlier versions of SQL Server, AND introduces a new set of collations based on Windows collations. You can now specIFy collations at the database level or at the column level. Previously, code pages AND sort orders could be specIFied only at the server level AND applied to all databases on a server. Microsoft® SQL Server™ 2000 supports several collations. 
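As a small sketch of specifying a collation at the database level or at the column level (the database and table names
here are just examples):

CREATE DATABASE SalesNL
COLLATE Latin1_General_CI_AS
GO

CREATE TABLE Customers
( cust_id   int,
  cust_name varchar(50) COLLATE SQL_Latin1_General_CP1_CI_AS   -- column-level collation overrides the database default
)
GO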
A collation encodes the rules governing the proper use of characters for either a language, such as Macedonian or Polish,
or an alphabet, such as Latin1_General (the Latin alphabet used by western European languages).
Each SQL Server collation specifies three properties:

- The sort order to use for Unicode data types (nchar, nvarchar, and ntext). A sort order defines the sequence in which
  characters are sorted, and the way characters are evaluated in comparison operations.
- The sort order to use for non-Unicode character data types (char, varchar, and text).
- The code page used to store non-Unicode character data.

The character set Latin 1 (or ANSI), for example, which is referred to as ISO 8859-1 under SQL Server 7.0, corresponds to
code page 1252 in SQL Server 2000's default settings. SQL Server installations used by western European languages normally
use one of the Latin1_General* collations. These collations come in case-sensitive or case-insensitive (CS/CI) and
accent-sensitive or accent-insensitive (AS/AI) variants.

Example 1:
----------

-- Suppose we create two databases, A and B, with different collations:
-- Database A: Latin1_General_CI_AS
-- Database B: SQL_Latin1_General_CP1_CI_AS

-- In A we create a simple little table named "a":
USE A
GO
create table a
( cust_id int,
  cust_name varchar(20)
)
GO
-- Insert a test record into a:
insert into a values (1,'Piet')
GO

-- In B we create a matching simple table named "b":
USE B
GO
create table b
( cust_id int,
  cust_name varchar(20)
)
GO
-- Insert a test record into b:
insert into b values (1,'Piet')
GO

-- Query 1:
-- Now run the following test query:
select a.cust_name,b.cust_name
from A.dbo.a a, B.dbo.b b
where a.cust_name=b.cust_name

-- SQL Server comes back with an error:
-- Server: Msg 446, Level 16, State 9, Line 1
-- Cannot resolve collation conflict for equal to operation.

-- Query 2:
-- Now run the following test query:
select a.cust_name,b.cust_name
from A.dbo.a a, B.dbo.b b
where a.cust_id=b.cust_id

cust_name            cust_name
-------------------- --------------------
Piet                 Piet

(1 row(s) affected)

-- This time there is no error.

With JOINs, and with WHERE clauses that use comparison operators such as =, >, < and so on, collation matters as soon as
char, varchar, nchar, nvarchar, text or ntext columns are involved. It does not matter for number-based columns
(int and the like), as the example above shows.

Database B (or A) could also be the (always present) SQL Server TEMPDB database. So even in a database whose queries
(or stored procedures) only use its own tables and columns, apparently all within the same database, collation errors
can still occur. This happens when a stored procedure uses a temporary table or table data type (via create #tablename,
create ##tablename, or declare @table_type_name), which is a construct that lives in the TEMPDB database; TEMPDB may use
a different collation, and collation errors can then be the result.

Once a "filled" database has a certain collation, it is NOT easy to change it to another collation. In practice it comes
down to pumping the data out, creating a new database (or altering the now empty one), and pumping the data back in
(via, for example, bcp, DTS, and so on).

Note: the statement "ALTER DATABASE .. COLLATE collation_name" only works on a database that is still empty.
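Before rebuilding anything, it helps to check which collations are actually in play. A quick sketch, using the example
databases A and B from above:

-- Server-wide default sort order and character set:
EXEC sp_helpsort

-- Collation of a specific database (SQL Server 2000):
SELECT CONVERT(varchar(60), DATABASEPROPERTYEX('A', 'Collation'))
SELECT CONVERT(varchar(60), DATABASEPROPERTYEX('B', 'Collation'))

-- Collation of the individual columns of a table (listed in the sp_help output):
USE A
GO
EXEC sp_help 'a'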
The "problematic" stored procedures and queries (they are not really problematic) can, however, be rebuilt so that no
errors occur. We can give the server an explicit rule to coerce one of the two sets of table columns to the other
collation. Query 1 can then, for example, be rebuilt as follows:

select a.cust_name,b.cust_name
from A.dbo.a a, B.dbo.b b
where a.cust_name COLLATE SQL_Latin1_General_CP1_CI_AS = b.cust_name

and the collation errors no longer occur. So if there are only a few stored procedures that juggle with temporary tables,
the rebuild operation is quite manageable. Alternatively, it may be possible to replace CREATE #TEMPTABLE with a suitable
SELECT INTO .. statement, since SELECT INTO takes over the properties of the source table.

Tables with collation information:
----------------------------------

TABLE 1:
--------
Sort order ID - SQL collation name

30  SQL_Latin1_General_Cp437_BIN
31  SQL_Latin1_General_Cp437_CS_AS
32  SQL_Latin1_General_Cp437_CI_AS
33  SQL_Latin1_General_Pref_CP437_CI_AS
34  SQL_Latin1_General_Cp437_CI_AI
40  SQL_Latin1_General_Cp850_BIN
41  SQL_Latin1_General_Cp850_CS_AS
42  SQL_Latin1_General_Cp850_CI_AS
43  SQL_Latin1_General_Pref_CP850_CI_AS
44  SQL_Latin1_General_Cp850_CI_AI
49  SQL_1Xcompat_CP850_CI_AS
50  Latin1_General_BIN
51  SQL_Latin1_General_Cp1_CS_AS
52  SQL_Latin1_General_Cp1_CI_AS
53  SQL_Latin1_General_Pref_CP1_CI_AS
54  SQL_Latin1_General_Cp1_CI_AI
55  SQL_AltDiction_Cp850_CS_AS
56  SQL_AltDiction_Pref_CP850_CI_AS
57  SQL_AltDiction_Cp850_CI_AI
58  SQL_Scandinavian_Pref_Cp850_CI_AS
59  SQL_Scandinavian_Cp850_CS_AS
60  SQL_Scandinavian_Cp850_CI_AS
61  SQL_AltDiction_Cp850_CI_AS
71  Latin1_General_CS_AS
72  Latin1_General_CI_AS
73  Danish_Norwegian_CS_AS
74  Finnish_Swedish_CS_AS
75  Icelandic_CS_AS
80  Hungarian_BIN (or Albanian_BIN, Czech_BIN, and so on)
81  SQL_Latin1_General_Cp1250_CS_AS
82  SQL_Latin1_General_Cp1250_CI_AS
83  SQL_Czech_Cp1250_CS_AS
84  SQL_Czech_Cp1250_CI_AS
85  SQL_Hungarian_Cp1250_CS_AS
86  SQL_Hungarian_Cp1250_CI_AS
87  SQL_Polish_Cp1250_CS_AS
88  SQL_Polish_Cp1250_CI_AS
89  SQL_Romanian_Cp1250_CS_AS
90  SQL_Romanian_Cp1250_CI_AS
91  SQL_Croatian_Cp1250_CS_AS
92  SQL_Croatian_Cp1250_CI_AS
93  SQL_Slovak_Cp1250_CS_AS
94  SQL_Slovak_Cp1250_CI_AS
95  SQL_Slovenian_Cp1250_CS_AS
96  SQL_Slovenian_Cp1250_CI_AS
104 Cyrillic_General_BIN (or Ukrainian_BIN, Macedonian_BIN)
105 SQL_Latin1_General_Cp1251_CS_AS
106 SQL_Latin1_General_Cp1251_CI_AS
107 SQL_Ukrainian_Cp1251_CS_AS
108 SQL_Ukrainian_Cp1251_CI_AS
112 Greek_BIN
113 SQL_Latin1_General_Cp1253_CS_AS
114 SQL_Latin1_General_Cp1253_CI_AS
120 SQL_MixDiction_Cp1253_CS_AS
121 SQL_AltDiction_Cp1253_CS_AS
124 SQL_Latin1_General_Cp1253_CI_AI
128 Turkish_BIN
129 SQL_Latin1_General_Cp1254_CS_AS
130 SQL_Latin1_General_Cp1254_CI_AS
136 Hebrew_BIN
137 SQL_Latin1_General_Cp1255_CS_AS
138 SQL_Latin1_General_Cp1255_CI_AS
144 Arabic_BIN
145 SQL_Latin1_General_Cp1256_CS_AS
146 SQL_Latin1_General_Cp1256_CI_AS
153 SQL_Latin1_General_Cp1257_CS_AS
154 SQL_Latin1_General_Cp1257_CI_AS
155 SQL_Estonian_Cp1257_CS_AS
156 SQL_Estonian_Cp1257_CI_AS
157 SQL_Latvian_Cp1257_CS_AS
158 SQL_Latvian_Cp1257_CI_AS
159 SQL_Lithuanian_Cp1257_CS_AS
160 SQL_Lithuanian_Cp1257_CI_AS
183 SQL_Danish_Pref_Cp1_CI_AS
184 SQL_SwedishPhone_Pref_Cp1_CI_AS
185 SQL_SwedishStd_Pref_Cp1_CI_AS
186 SQL_Icelandic_Pref_Cp1_CI_AS
192 Japanese_BIN
193 Japanese_CI_AS
194 Korean_Wansung_BIN
195 Korean_Wansung_CI_AS
196 Chinese_Taiwan_Stroke_BIN
197 Chinese_Taiwan_Stroke_CI_AS
198 Chinese_PRC_BIN
199 Chinese_PRC_CI_AS
200 Japanese_CS_AS
201 Korean_Wansung_CS_AS 202 Chinese_Taiwan_Stroke_CS_AS 203 Chinese_PRC_CS_AS 204 Thai_BIN 205 Thai_CI_AS 206 Thai_CS_AS 210 SQL_EBCDIC037_CP1_CS_AS 211 SQL_EBCDIC273_CP1_CS_AS 212 SQL_EBCDIC277_CP1_CS_AS 213 SQL_EBCDIC278_CP1_CS_AS 214 SQL_EBCDIC280_CP1_CS_AS 215 SQL_EBCDIC284_CP1_CS_AS 216 SQL_EBCDIC285_CP1_CS_AS 217 SQL_EBCDIC297_CP1_CS_AS TABLE 2: -------- SQL7 sort order versus SQL2000 collation name 30 Binary order, for use with the 437 (U.S. English) character set. 31 Dictionary order, case-sensitive, for use with the 437 (U.S. English) character set. 32 Dictionary order, case-insensitive, for use with the 437 (U.S. English) character set. 33 Dictionary order, case-insensitive, uppercase preference, for use with the 437 (U.S. English) character set. 34 Dictionary order, case-insensitive, accent-insensitive, for use with the 437 (U.S. English) character set. 40 Binary order, for use with the 850 (Multilingual) character set. 41 Dictionary order, case-sensitive, for use with the 850 (Multilingual) character set. 42 Dictionary order, case-insensitive, for use with the 850 (Multilingual) character set. 43 Dictionary order, case-insensitive, uppercase preference, for use with the 850 (Multilingual) character set. 44 Dictionary order, case-insensitive, accent-insensitive, for use with the 850 (Multilingual) character set. 49 Strict compatibility with version 1.x case-insensitive databases, for use with the 850 (Multilingual) character set. 50 Binary order for use with 1252 character set. 51 Dictionary order, case-sensitive, for use with 1252 character set. 52 Dictionary order, case-insensitive, for use with 1252 character set. 53 Dictionary order, case-insensitive, uppercase preference, for use with 1252 character set. 54 Dictionary order, case-insensitive, accent-insensitive, for use with 1252 character set. 55 Alternate dictionary order, case-sensitive, for use with the 850 (Multilingual) character set. 56 Alternate dictionary order, case-insensitive, uppercase preference, for use with the 850 (Multilingual) character set. 57 Alternate dictionary order, case-insensitive, accent-insensitive, for use with the 850 (Multilingual) character set. 58 ScANDinavian dictionary order, case-insensitive, uppercase preference, for use with the 850 (Multilingual) character set. 59 ScANDinavian dictionary order, case-sensitive, for use with the 850 (Multilingual) character set. 60 ScANDinavian dictionary order, case-insensitive, for use with the 850 (Multilingual) character set. 61 Alternate dictionary order, case-insensitive, for use with the 850 (Multilingual) character set. 71 Latin-1 case-sensitive, for use with 1252 character set. 72 Latin-1 case-insensitive, for use with 1252 character set. 73 Danish/Norwegian case-sensitive sort order for code page 1252. 74 Finnish/Swedish case-sensitive sort order for code page 1252. 75 IcelANDic case-sensitive sort order for code page 1252. 80 Binary order, for use with the 1250 (Central European) character set. 81 Dictionary order, case-sensitive, for use with the 1250 (Central European) character set. 82 Dictionary order, case-insensitive, for use with the 1250 (Central European) character set. 83 Czech dictionary order, case-sensitive, for use with the 1250 (Central European) character set. 84 Czech dictionary order, case-insensitive, for use with the 1250 (Central European) character set. 85 Hungarian dictionary order, case-sensitive, for use with the 1250 (Central European) character set. 
86 Hungarian dictionary order, case-insensitive, for use with the 1250 (Central European) character set. 87 Polish dictionary order, case-sensitive, for use with the 1250 (Central European) character set. 88 Polish dictionary order, case-insensitive, for use with the 1250 (Central European) character set. 89 Romanian dictionary order, case-sensitive, for use with the 1250 (Central European) character set. 90 Romanian dictionary order, case-insensitive, for use with the 1250 (Central European) character set. 91 Croatian dictionary order, case-sensitive, for use with the 1250 (Central European) character set. 92 Croatian dictionary order, case-insensitive, for use with the 1250 (Central European) character set. 93 Slovak dictionary order, case-sensitive, for use with the 1250 (Central European) character set. 94 Slovak dictionary order, case-insensitive, for use with the 1250 (Central European) character set. 95 Slovenian dictionary order, case-sensitive, for use with the 1250 (Central European) character set. 96 Slovenian dictionary order, case-insensitive, for use with the 1250 (Central European) character set. 97 Windows Polish case-sensitive sort order for code page 1250. 98 Windows Polish case-insensitive sort order for code page 1250. 104 Binary order, for use with the 1251 (Cyrillic) character set. 105 Dictionary order, case-sensitive, for use with the 1251 (Cyrillic) character set. 106 Dictionary order, case-insensitive, for use with the 1251 (Cyrillic) character set. 107 Ukrainian dictionary order, case-sensitive, for use with the 1251 (Cyrillic) character set. 108 Ukrainian dictionary order, case-insensitive, for use with the 1251 (Cyrillic) character set. 112 Binary order, for use with the 1253 (Greek) character set. 113 Dictionary order, case-sensitive, for use with the 1253 (Greek) character set. 114 Dictionary order, case-insensitive, for use with the 1253 (Greek) character set. 120 Mixed dictionary order, for use with the 1253 (Greek) character set. 121 Dictionary order, case-sensitive, accent-sensitive, for use with the 1253 (Greek) character set. 124 Dictionary order, case-insensitive, accent-insensitive, for use with the 1253 (Greek) character set. 128 Binary order, for use with the 1254 (Turkish) character set. 129 Dictionary order, case-sensitive, for use with the 1254 (Turkish) character set. 130 Dictionary order, case-insensitive, for use with the 1254 (Turkish) character set. 136 Binary order, for use with the 1255 (Hebrew) character set. 137 Dictionary order, case-sensitive, for use with the 1255 (Hebrew) character set. 138 Dictionary order, case-insensitive, for use with the 1255 (Hebrew) character set. 144 Binary order, for use with the 1256 (Arabic) character set. 145 Dictionary order, case-sensitive, for use with the 1256 (Arabic) character set. 146 Dictionary order, case-insensitive, for use with the 1256 (Arabic) character set. 152 Binary order, for use with the 1257 (Baltic) character set. 153 Dictionary order, case-sensitive, for use with the 1257 (Baltic) character set. 154 Dictionary order, case-insensitive, for use with the 1257 (Baltic) character set. 155 Estonian dictionary order, case-sensitive, for use with the 1257 (Baltic) character set. 156 Estonian dictionary order, case-insensitive, for use with the 1257 (Baltic) character set. 157 Latvian dictionary order, case-sensitive, for use with the 1257 (Baltic) character set. 158 Latvian dictionary order, case-insensitive, for use with the 1257 (Baltic) character set. 
159 Lithuanian dictionary order, case-sensitive, for use with the 1257 (Baltic) character set. 160 Lithuanian dictionary order, case-insensitive, for use with the 1257 (Baltic) character set. 183 Danish/Norwegian dictionary order, case-insensitive, uppercase preference, for use with 1252 character set. 184 Swedish/Finnish (StANDard) dictionary order, case-insensitive, uppercase preference, for use with 1252 character set. 185 Swedish/Finnish (Phone) dictionary order, case-insensitive, uppercase preference, for use with 1252 character set. 186 IcelANDic dictionary order, case-insensitive, uppercase preference, for use with 1252 character set. 192 Binary order, for use with the 932 (Japanese) character set. 193 Dictionary order, case-insensitive, for use with the 932 (Japanese) character set 194 Binary order, for use with the 949 (Korean) character set. 195 Dictionary order, case-insensitive, for use with the 949 (Korean) character set. 196 Binary order, for use with the 950 (Traditional Chinese) character set. 197 Dictionary order, case-insensitive, for use with the 950 (Traditional Chinese) character set. 198 Binary order, for use with the 936 (SimplIFied Chinese) character set. 199 Dictionary order, case-insensitive, for use with the 936 (SimplIFied Chinese) character set. 200 Dictionary order, case-sensitive, for use with the 932 (Japanese) character set. 201 Dictionary order, case-sensitive, for use with the 949 (Korean) character set. 202 Dictionary order, case-sensitive, for use with the 950 (Traditional Chinese) character set. 203 Dictionary order, case-sensitive, for use with the 936 (SimplIFied Chinese) character set. 204 Binary order, for use with the 874 (Thai) character set. 205 Dictionary order, case-insensitive, for use with the 874 (Thai) character set. 206 Dictionary order, case-sensitive, for use with the 874 (Thai) character set. TABLE 3: -------- Locale_ID Name 1033 General Unicode 33280 Binary Order 1027 Catalan 197636 Chinese Bopomofo (Taiwan Region) 2052 Chinese Punctuation 133124 Chinese Stroke Count 1028 Chinese Stroke Count (Taiwan Region) 1050 Croatian 1029 Czech 1043 Dutch 1061 Estonian 1036 French 66615 Georgian Modern 1031 German 66567 German Phone Book 1038 Hungarian 66574 Hungarian Technical 1039 IcelANDic 1040 Italian 1041 Japanese 66577 Japanese Unicode 1042 Korean 66578 Korean Unicode 1062 Latvian 1063 Lithuanian 1071 Macedonian 1044 Norwegian/Danish 1045 Polish 1046 Portuguese 1048 Romanian 1051 Slovak 1060 Slovenian 1034 Spanish Traditional 3082 Spanish Modern 1053 Swedish/Finnish 1054 Thai 2057 UK English 1058 Ukrainian 1066 Vietnamese ---------------------------------------------------------------------- 17. Trace flags: ---------------------------------------------------------------------- Trace flags can either be set upon startup of SQL Server by using the -Ttrace# option upon SQL Server startup or by using the DBCC TRACEON console commAND. Either way the trace flag will be active until SQL Server is restarted or you use the DBCC TRACEOFF console commAND to turn the trace flag off. 1 Sets trace flags for all client connections, rather than for a single client connection. Because trace flags set using the -T commAND-line option automatically apply to all connections, this trace flag is used only when setting trace flags using DBCC TRACEON AND DBCC TRACEOFF. 106 Disables line number information for syntax errors. 107 Interprets numbers with a decimal point as float instead of decimal. 
205 Report when a statistics-depENDent stored procedure is being recompiled as a result of AutoStat. 206 Provides backward compatibility for the setuser statement. 208 SET QUOTED IDENTIFIER ON. 242 Provides backward compatibility for correlated subqueries WHERE non-ANSI-stANDard results are desired. 243 The behavior of SQL Server is now more consistent because nullability checks are made at run time AND a nullability violation results in the commAND terminating AND the batch or transaction process continuing. 244 Disables checking for allowed interim constraint violations. By default, SQL Server checks for AND allows interim constraint violations. An interim constraint violation is caused by a change that removes the violation such that the constraint is met, all within a single statement AND transaction. SQL Server checks for interim constraint violations for self-referencing DELETE statements, INSERT, AND multirow UPDATE statements. This checking requires more work tables. With this trace flag you can disallow interim constraint violations, thus requiring fewer work tables. 257 Will invoke a print algorithm on the XML output before returning it to make the XML result more readable. 260 Prints the versioning information about extENDed stored procedure dlls. 302 Prints information about whether the statistics page is used, the actual SELECTivity (IF available), AND what SQL Server estimated the physical AND logical I/O would be for the indexes. Trace flag 302 should be used with trace flag 310 to show the actual join ordering. 310 Prints information about join order. Index SELECTion information is also available in a more readable format using SET SHOWPLAN_ALL, as described in the SET statement. 325 Prints information about the cost of using a nonclustered index or a sort to process an ORDER BY clause. 326 Prints information about the estimated AND actual cost of sorts. 330 Enables full output when using the SET SHOWPLAN_ALL option, which gives detailed information about joins. 506 Enforces SQL-92 stANDards regarding null values for comparisons between variables AND parameters. Any comparison of variables AND parameters that contain a NULL always results in a NULL. 652 Disables read ahead for the server. 653 Disables read ahead for the current connection. 809 Limits the amount of Lazy Write activity in SQL Server 2000. 1180 Forces allocation to use free pages for text or image data AND maintain efficiency of storage. 1200 Prints lock information (the process ID AND type of lock requested). 1204 Returns the type of lock participating in the deadlock AND the current commAND affect by the deadlock. 1205 Returns more detailed information about the commAND being executed at the time of a deadlock. 1206 Used to complement flag 1204 by displaying other locks held by deadlock parties 1609 Turns on the unpacking AND checking of remote procedure call (RPC) information in Open Data Services. Used only when applications depEND on the old behavior. 1704 Prints information when a temporary table is created or dropped. 1807 Allows you to configure SQL Server with network-based database files. 2505 Prevents DBCC TRACEON 208, SPID 10 errors FROM appearing in the error log. 2508 Disables parallel non-clustered index checking for DBCC CHECKTABLE. 2509 Used with DBCC CHECKTABLE.html to see the total count of ghost records in a table 2528 Disables parallel checking of objects by DBCC commANDs. 2701 Sets the @@ERROR system function to 50000 for RAISERROR messages with severity levels of 10 or less. 
When disabled, sets the @@ERROR system function to 0 for RAISERROR messages with severity levels of 10 or less. 3104 Causes SQL Server to bypass checking for free space. 3111 Cause LogMgr::ValidateBackedupBlock to be skipped during backup AND restore operations. 3205 Disables hardware compression for tape drivers. 3222 Disables the read ahead that is used by the recovery operation during roll forward operations. 3502 Prints a message to the log at the start AND END of each checkpoint. 3503 Indicates whether the checkpoint at the END of automatic recovery was skipped for a database (this applies only to read-only databases). 3602 Records all error AND warning messages sent to the client. 3604 SENDs trace output to the client. Used only when setting trace flags with DBCC TRACEON AND DBCC TRACEOFF. 3605 SENDs trace output to the error log. (IF you start SQL Server FROM the commAND prompt, the output also appears on the screen.) 3607 Skips automatic recovery (at startup) for all databases. 3608 Skips automatic recovery (at startup) for all databases except the master database. 3609 Skips the creation of the tempdb database at startup. Use this trace flag IF the device or devices on which tempdb resides are problematic or problems exist in the model database. 3626 Turns on tracking of the CPU data for the sysprocesses table. 3640 Eliminates the sENDing of DONE_IN_PROC messages to the client for each statement in a stored procedure. This is similar to the session setting of SET NOCOUNT ON, but when set as a trace flag, every client session is hANDled this way. 4022 Bypasses automatically started procedures. 4030 Prints both a byte AND ASCII representation of the receive buffer. Used when you want to see what queries a client is sENDing to SQL Server. You can use this trace flag IF you experience a protection violation AND want to determine which statement caused it. Typically, you can set this flag globally or use SQL Server Enterprise Manager. You can also use DBCC INPUTBUFFER. 4031 Prints both a byte AND ASCII representation of the sEND buffers (what SQL Server sENDs back to the client). You can also use DBCC OUTPUTBUFFER. 4032 Traces the SQL commANDs coming in FROM the client. The output destination of the trace flag is controlled with the 3605/3604 trace flags. 7300 Retrieves extENDed information about any error you encounter when you execute a distributed query. 7501 Dynamic cursors are used by default on forward-only cursors. Dynamic cursors are faster than in earlier versions AND no longer require unique indexes. This flag disables the dynamic cursor enhancements AND reverts to version 6.0 behavior. 7502 Disables the caching of cursor plans for extENDed stored procedures. 7505 Enables version 6.x hANDling of return codes when calling dbcursorfetchex AND the resulting cursor position follows the END of the cursor result set. 7525 Reverts to the SQL Server 7.0 behavior of closing nonstatic cursors regardless of the SET CURSOR_CLOSE_ON_COMMIT state in SQL Server 2000. 8202 Replicates all UPDATE commANDs as DELETE/INSERT pairs at the publisher. 8206 Supports stored procedure execution with a user specIFied owner name for SQL Server subscribers or without owner qualIFication for heterogeneous subscribers in SQL Server 2000. 8207 Enables singleton updates for Transactional Replication, released with SQL Server 2000 Service Pack 1. 8599 Allows you to use a savepoint within a distributed transaction. 8679 Prevents the SQL Server optimizer FROM using a Hash Match Team operator. 
8687 Used to disable query parallelism. 8721 Dumps information into the error log when AutoStat has been run. 8783 Allows DELETE, INSERT, AND UPDATE statements to honor the SET ROWCOUNT ON setting when enabled. 8816 Logs every two-digit year conversion to a four-digit year. ---------------------------------------------------------------------- 18. Auditing examples: ---------------------------------------------------------------------- TEST 1: ======= -- TEST TABLE TO AUDIT: CREATE TABLE TEST_PRODUCTS ( prod_id int not null primary key, prod_name varchar(20) ) INSERT INTO TEST_PRODUCTS values (1,'auto') -- TEST AUDIT TRAIL TABLE: CREATE TABLE TEST_AUDIT ( AuditTrailID Int IDENTITY (1, 1) NOT NULL, TableName VarChar (50) NULL, ActionDate DateTime NULL, type varchar(1) NULL, name VarChar (128) NULL, spid int NULL, cinfo varbinary(128) NULL ) -- PROCEDURE TO GET USERNAME: create procedure set_c1 @name varchar(128) as declare @x varbinary(128) SELECT @x=convert(varbinary(128),@name) set context_info @x -- TRIGGERS ON TEST_PRODUCTS: create trigger tr_audit_ins on TEST_PRODUCTS for INSERT as declare @UserName varchar(128) declare @cinfo varbinary(128) SELECT @cinfo=(SELECT context_info FROM master.dbo.sysprocesses WHERE spid=@@spid) set @username=convert(varchar(128),@cinfo) INSERT INTO TEST_AUDIT (TableName,ActionDate,type,name,spid,cinfo) VALUES ('Products',GetDate(),'I',@username,@@spid,@cinfo) create trigger tr_audit_del on TEST_PRODUCTS for DELETE as declare @UserName varchar(128) declare @cinfo varbinary(128) SELECT @cinfo=(SELECT context_info FROM master.dbo.sysprocesses WHERE spid=@@spid) set @username=convert(varchar(128),@cinfo) INSERT INTO TEST_AUDIT (TableName,ActionDate,type,name,spid,cinfo) VALUES ('Products',GetDate(),'D',@username,@@spid,@cinfo) create trigger tr_audit_up on TEST_PRODUCTS for update as declare @UserName varchar(128) declare @cinfo varbinary(128) SELECT @cinfo=(SELECT context_info FROM master.dbo.sysprocesses WHERE spid=@@spid) set @username=convert(varchar(128),@cinfo) INSERT INTO TEST_AUDIT (TableName,ActionDate,type,name,spid,cinfo) VALUES ('Products',GetDate(),'U',@username,@@spid,@cinfo) -------------------------------------------------------------- TEST 2: ======= CREATE TABLE dbo.AuditTrail ( AuditTrailID Int IDENTITY (1, 1) NOT NULL, TableName VarChar (50) NOT NULL, ActionTaken Char (1) NOT NULL, ActionUser VarChar (50) NOT NULL, ActionDate DateTime NOT NULL ) ON [PRIMARY] GO CREATE TRIGGER [AuditINSERTUpdate] ON dbo.Products FOR INSERT, UPDATE AS INSERT INTO AuditTrail (TableName, ActionTaken, ActionUser, ActionDate) VALUES ('Products', 'I', User_Name(), GetDate()) UPDATE dbo.Products SET UnitPrice = 1 WHERE ProductID = 1 -------------------------------------------------------------- TEST 3: ======= CREATE TRIGGER "audit_trigger_Orders" ON "Orders" FOR INSERT, UPDATE, DELETE NOT FOR REPLICATION AS DECLARE @TrigTime DateTime set @TrigTime = getDate() UPDATE audit_Orders SET audit_ENDdatetime = (@TrigTime), audit_ENDappname = (APP_Name()), audit_ENDusername = (USER_Name()), audit_ENDhostname = (HOST_NAME()) FROM DELETEd,audit_Orders WHERE audit_Orders.OrderID = DELETEd.OrderID AND audit_ENDdatetime = '9/9/9999' INSERT INTO audit_Orders ( OrderID, CustomerID, EmployeeID, OrderDate, RequiredDate, ShippedDate, ShipVia, Freight, ShipName, ShipAddress, ShipCity, ShipRegion, ShipPostalCode, ShipCountry, audit_startdatetime, audit_ENDdatetime, audit_startusername, audit_startappname, audit_starthostname ) SELECT OrderID, CustomerID, EmployeeID, OrderDate, 
RequiredDate, ShippedDate, ShipVia, Freight, ShipName, ShipAddress, ShipCity, ShipRegion, ShipPostalCode, ShipCountry, @TrigTime, '9/9/9999', user_name(), app_name(), host_name() FROM INSERTed -------------------------------------------------------------- TEST 4: ======= create table trigtest ( i_int_key int not null, j_int_key int not null, s_varchar varchar(10), t_char varchar(10), d_date datetime ) go alter table trigtest add constraint pk primary key (i_int_key, j_int_key) go create table trigtest_au ( i_int_key int not null, j_int_key int not null, s_varchar varchar(10), t_char varchar(10), d_date datetime, UpdateDate datetime, UserName varchar(128), type varchar(10) ) go create trigger tr_au_trigtest on trigtest for update, DELETE as declare @type varchar(1) , @UpdateDate datetime , @UserName varchar(128) IF exists (SELECT * FROM INSERTed) SELECT @type = 'U' else SELECT @type = 'D' SELECT @UpdateDate = getdate() , @UserName = system_user INSERT trigtest_au (i_int_key, j_int_key, s_varchar, t_char, d_date, UpdateDate, UserName, type) SELECT i_int_key, j_int_key, s_varchar, t_char, d_date, @UpdateDate, @UserName, @type + '_old' FROM DELETEd go create trigger tr_au_trigtest on trigtest for INSERT, update, DELETE as declare @type varchar(1) , @UpdateDate datetime , @UserName varchar(128) IF exists (SELECT * FROM INSERTed) AND exists (SELECT * FROM DELETEd) SELECT @type = 'U' else IF exists (SELECT * FROM INSERTed) SELECT @type = 'I' else SELECT @type = 'D' SELECT @UpdateDate = getdate() , @UserName = system_user INSERT trigtest_au (i_int_key, j_int_key, s_varchar, t_char, d_date, UpdateDate, UserName, type) SELECT i_int_key, j_int_key, s_varchar, t_char, d_date, @UpdateDate, @UserName, @type + '_old' FROM DELETEd INSERT trigtest_au (i_int_key, j_int_key, s_varchar, t_char, d_date, UpdateDate, UserName, type) SELECT i_int_key, j_int_key, s_varchar, t_char, d_date, @UpdateDate, @UserName, @type + '_new' FROM INSERTed go -------------------------------------------------------------- TEST 5: ======= CREATE TABLE dbo.CUSTOMER_AUDIT ( customerid int not null, action char not null, modIFieddate datetime not null, modIFiedby sysname not null ) CREATE CLUSTERED INDEX IE_CUSTOMER_AUDIT_00 ON dbo.CUSTOMER_AUDIT ( customerid ) We can now create a trigger that audits each DML statement, regardless of whether it originated FROM a stored procedure or someWHERE else. CREATE TRIGGER dbo.CUSTOMER_AUDIT_IUD ON dbo.CUSTOMER FOR INSERT, UPDATE, DELETE AS DECLARE @i int, @d int, @action char IF (@@ROWCOUNT = 0) RETURN SELECT @i = COUNT(*) FROM INSERTed SELECT @d = COUNT(*) FROM DELETEd SELECT @action = CASE WHEN (@i != 0) AND (@d = 0) THEN 'I' WHEN (@i != 0) AND (@d != 0) THEN 'U' WHEN (@i = 0) AND (@d != 0) THEN 'D' END IF (@action IN ( 'I', 'U' )) INSERT INTO dbo.CUSTOMER_AUDIT ( customerid, action, modIFieddate, modIFiedby ) SELECT customerid, @action, current_timestamp, suser_sname() FROM INSERTed ELSE INSERT INTO dbo.CUSTOMER_AUDIT ( customerid, action, modIFieddate, modIFiedby ) SELECT customerid, @action, current_timestamp, suser_sname() FROM DELETEd RETURN ---------------------------------------------------------------------- 19. Impersonation and Authentication Delegation: ---------------------------------------------------------------------- 19.1. What is Authentication Delegation: ---------------------------------------- Impersonation. -------------- This mechanism allows a server process to run using the security credentials of the client. 
When the server is impersonating the client, any operations performed by the server are performed using the client's credentials. Impersonation does not allow the server to access remote resources on behalf of the client. This requires delegation. Delegation. ----------- Like impersonation, delegation allows a server process to run using the security credentials of the client. However, delegation is more powerful and allows the server process to make calls to other computers while acting as the client. Security account delegation is the ability to connect to multiple servers and, with each server change, to retain the authentication credentials of the original client. For example, if a user (LONDON\joetuck) connects to ServerA, which then connects to ServerB, ServerB knows that the connection security identity is LONDON\joetuck. Delegation is the act of allowing a service to impersonate a user account or a computer account to access resources throughout the network. In an N-tier program, the user authenticates to a middle-tier service. The middle-tier service authenticates to a back-end data server on behalf of the user. Delegation depends on the middle-tier service that is being trusted for delegation. If this server is set to "Trusted for delegation", the service can impersonate a user to use other network services. For example, a user runs a Web program and that Web program uses several different SQL databases that exist on different servers. When the user authenticates to a server (the front-end server) that is trusted for delegation, the server can access the SQL database on the other servers as the user. Because the server that is "trusted for delegation" has the user's ticket-granting ticket (TGT), it can authenticate to any service on the network. In Windows Server 2003, you can control the services that can impersonate the user by using constrained delegation. 19.2. Use of IIS (ASP and ASP.NET) and SQLServer: ------------------------------------------------- If the SQL and IIS server are separate boxes, you *can't* authenticate a browser client against an SQL backend without Kerberos delegation setup correctly, NT does not support delegation of access tokens. This requires an W2K/W2K3 AD realm , and IE 5.x or higher clients. The client machines must be members of the W2K/W2K3 domain. The domain "accounts" must have "Delegation" enabled, and the IIS server "machine" account must be "trusted for delegation". You also have to register SQL server in the AD (see Books online). Consider also that: - IIS may not run on a DC, as a DC cannot be trusted for delegation. - you throw away SQL connection pooling, as connections must carry the same credentials for pooling to work. 19.3. More information: ------------------------ 1. Delegated Authentication Windows services impersonate clients when accessing resources on their behalf. In many cases, a service can complete its work for the client by accessing resources on the local computer. Both NTLM and Kerberos provide the information that a service needs to impersonate its client locally. However, some distributed applications are designed so that a front-end service must impersonate clients when connecting to back-end services on other computers. The Kerberos protocol has a proxy mechanism that allows a service to impersonate its client when connecting to other services. No equivalent is available with NTLM. 
Kerberos authentication generates a delegate-level token, as long as the following two conditions are met: -> The account that you are trying to delegate is not marked "sensitive and cannot be delegated" in the Active Directory. -> The principal account against which you are authenticating (the user account under which the server process is running) is marked "Trusted for delegation" in the Active Directory. A typical scenario in which you may want to delegate user credentials is if a computer (Computer A) that has Microsoft Internet Explorer installed requests Active Server Pages (ASP) pages FROM a Microsoft Internet Information Server (IIS) Web server on a second computer (Computer B), and the ASP pages invoke Component Object Model (COM)/COM+ components on a third computer (Computer C). You want the COM/COM+ application to see the identity of the user that is logged on to the first computer. Computer A Computer B Computer C Internet Explorer Internet Information Server COM/COM+ components User A User B User C For delegation to work in this scenario, clear the "Account is sensitive and cannot be delegated" check box for User A, and SELECT the "Trusted for delegation" check box for Computer B. After you configure these settings for User A and Computer B, the COM/COM+ application on Computer C can see the identity of the user who is logged on to Computer A. ---------------------------------------------------------------------- 20. Transaction Isolation levels: ---------------------------------------------------------------------- SET TRANSACTION ISOLATION LEVEL Controls the default transaction locking behavior for all SELECT statements issued by a connection. Syntax SET TRANSACTION ISOLATION LEVEL { READ COMMITTED | READ UNCOMMITTED | REPEATABLE READ | SERIALIZABLE } The isolation property is one of the four ACID properties a logical unit of work must display to qualify as a transaction. It is the ability to shield transactions from the effects of updates performed by other concurrent transactions. The level of isolation is actually customizable for each transaction. Microsoft® SQL Server™ supports the transaction isolation levels defined in SQL-92. Setting transaction isolation levels allows programmers to trade off increased risk of certain integrity problems with support for greater concurrent access to data. Each isolation level offers more isolation than the previous level, but does so by holding more restrictive locks for longer periods. The transaction isolation levels are: READ UNCOMMITTED READ COMMITTED REPEATABLE READ SERIALIZABLE Transaction isolation levels can be set using Transact-SQL or through a database API: READ COMMITTED Specifies that shared locks are held while the data is being read to avoid dirty reads, but the data can be changed before the end of the transaction, resulting in nonrepeatable reads or phantom data. This option is the SQL Server default. READ UNCOMMITTED Implements dirty read, or isolation level 0 locking, which means that no shared locks are issued and no exclusive locks are honored. When this option is set, it is possible to read uncommitted or dirty data; values in the data can be changed and rows can appear or disappear in the data set before the end of the transaction. This option has the same effect as setting NOLOCK on all tables in all SELECT statements in a transaction. This is the least restrictive of the four isolation levels. 
REPEATABLE READ
Locks are placed on all data that is used in a query, preventing other users from updating the data, but new phantom rows
can be inserted into the data set by another user and are included in later reads in the current transaction.
Because concurrency is lower than the default isolation level, use this option only when necessary.

SERIALIZABLE
Places a range lock on the data set, preventing other users from updating or inserting rows into the data set until the
transaction is complete. This is the most restrictive of the four isolation levels. Because concurrency is lower, use this
option only when necessary. This option has the same effect as setting HOLDLOCK on all tables in all SELECT statements in
a transaction.

Remarks
Only one of the options can be set at a time, and it remains set for that connection until it is explicitly changed.
This becomes the default behavior unless an optimization option is specified at the table level in the FROM clause of the
statement. The setting of SET TRANSACTION ISOLATION LEVEL is set at execute or run time and not at parse time.

Examples
This example sets the TRANSACTION ISOLATION LEVEL for the session. For each Transact-SQL statement that follows,
SQL Server holds all of the shared locks until the end of the transaction.

SET TRANSACTION ISOLATION LEVEL REPEATABLE READ
GO
BEGIN TRANSACTION
SELECT * FROM publishers
SELECT * FROM authors
...
COMMIT TRANSACTION

Transact-SQL
Transact-SQL scripts and DB-Library applications use the SET TRANSACTION ISOLATION LEVEL statement.

ADO
ADO applications set the IsolationLevel property of the Connection object to adXactReadUncommitted, adXactReadCommitted,
adXactRepeatableRead, or adXactReadSerializable.

OLE DB
OLE DB applications call ITransactionLocal::StartTransaction with isoLevel set to ISOLATIONLEVEL_READUNCOMMITTED,
ISOLATIONLEVEL_READCOMMITTED, ISOLATIONLEVEL_REPEATABLEREAD, or ISOLATIONLEVEL_SERIALIZABLE.

ODBC
ODBC applications call SQLSetConnectAttr with Attribute set to SQL_ATTR_TXN_ISOLATION and ValuePtr set to
SQL_TXN_READ_UNCOMMITTED, SQL_TXN_READ_COMMITTED, SQL_TXN_REPEATABLE_READ, or SQL_TXN_SERIALIZABLE.

About the possible locking mechanisms / isolation levels in SQL 2000, and whether they are applicable to the iamv
application, please read the following.

There are 4 possible transaction isolation levels:

READ UNCOMMITTED  (least restrictive: it really only exists because it is part of the ANSI standard)
READ COMMITTED    (default)
REPEATABLE READ
SERIALIZABLE      (most restrictive level)

Suppose we have two users, Piet and Karel, and we create the table SALES:

create table sales
( id int,
  product varchar(10)
)

insert into sales values (1,'Boeken')
insert into sales values (2,'Bier')
insert into sales values (3,'koekjes')
etc..

Suppose Piet and Karel are both working and, independently of each other, they run select, insert and update statements.
You will then hardly ever notice a locking effect, even if Piet, for example, had run:

set transaction isolation level SERIALIZABLE
update sales set product='kaakjes' where id=3

Usually the individual statements (implicit transactions) complete so quickly that their locks only exist for a matter of
milliseconds. But now Piet does this:

set transaction isolation level SERIALIZABLE
BEGIN TRANSACTION XYZ
update sales set product='snoepies' where id=4

Whatever Karel now tries to do, for example an insert of a new row or an update of another row, it will not go through
until Piet issues a COMMIT on transaction XYZ.
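A sketch of what this looks like from the other side (the session names are just labels). Karel's statement below simply
hangs until Piet commits or rolls back transaction XYZ, and any third connection can watch the blocking:

-- Session of Karel: waits on the exclusive table lock held by Piet's open transaction
INSERT INTO sales VALUES (5,'Chips')

-- Any third session: see who is blocked and by whom
SELECT spid, blocked, waittime, cmd
FROM master.dbo.sysprocesses
WHERE blocked <> 0

EXEC sp_lock     -- produces a listing like the one below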
sp_lock laat dan bijvoorbeeld zien (vooraf piet's commit): spid dbid ObjId IndId Type Resource Mode Status ------ ------ ----------- ------ ---- ---------------- -------- ------ 52 7 0 0 DB S GRANT 53 7 0 0 DB S GRANT 53(piet) 7 1977058079 0 TAB X GRANT 54(karel) 7 1977058079 0 TAB IX WAIT 54 7 0 0 DB S GRANT 55 1 85575343 0 TAB IS GRANT ---------------------------------------------------------------------- 21. CREATE A DATABASE REPORT: ---------------------------------------------------------------------- DECLARE @NO_OF_OBJ INT PRINT '-- -----------------------------------------------------' PRINT '-- CHECK 1: NUMBER OF DIFFERENT OBJECTS IN DATABASE:' PRINT '-- -----------------------------------------------------' select @NO_OF_OBJ=(select count(*) from sysobjects where xtype='U') PRINT 'NO OF TABLES: '+convert(varchar(32),@NO_OF_OBJ) select @NO_OF_OBJ=(select count(*) from sysobjects where xtype='V') PRINT 'NO OF VIEWS: '+convert(varchar(32),@NO_OF_OBJ) select @NO_OF_OBJ=(select count(*) from sysobjects where xtype='P') PRINT 'NO OF STORED PROCEDURES: '+convert(varchar(32),@NO_OF_OBJ) select @NO_OF_OBJ=(select count(*) from sysobjects where xtype='TR') PRINT 'NO OF TRIGGERS: '+convert(varchar(32),@NO_OF_OBJ) select @NO_OF_OBJ=(select count(*) from sysobjects where xtype='C') PRINT 'NO OF CHECK CONSTRAINTS: '+convert(varchar(32),@NO_OF_OBJ) select @NO_OF_OBJ=(select count(*) from sysobjects where xtype='D') PRINT 'NO OF DEFAULT CONSTRAINTS: '+convert(varchar(32),@NO_OF_OBJ) select @NO_OF_OBJ=(select count(*) from sysobjects where xtype='R') PRINT 'NO OF RULES: '+convert(varchar(32),@NO_OF_OBJ) PRINT '-- -----------------------------------------------------' PRINT '-- CHECK 2: COUNT OF NO OF RECORDS IN ALL TABLES OF THE DATABASE:' PRINT '-- -----------------------------------------------------' SET NOCOUNT ON declare @TABLE VARCHAR(64) declare @num INT declare c1 cursor for select name from sysobjects where xtype='U' order by name open c1 fetch next from c1 into @table while (@@fetch_status<>-1) begin print 'count records for: '+@table exec('select count(*) from '+@table) fetch next from c1 into @table end close c1 deallocate c1 PRINT '-- -----------------------------------------------------' PRINT '-- CHECK 3: LIST OF ALL PRIMARY KEYS:' PRINT '-- -----------------------------------------------------' SELECT substring(name,1,30) AS "PrimaryKey", id, xtype, object_name(parent_obj) AS "Parent_table" FROM sysobjects WHERE xtype='PK' ORDER BY object_name(parent_obj) PRINT '-- -----------------------------------------------------' PRINT '-- CHECK 4: LIST OF ALL FOREIGN KEYS:' PRINT '-- -----------------------------------------------------' PRINT 'LIST 1:' PRINT '-------' SELECT substring(name, 1, 40) as "ForeignKey", substring(object_name(parent_obj), 1, 30) as "TableWithFK" FROM sysobjects o, sysreferences r WHERE o.type='F' AND o.name=object_name(r.constid) ORDER BY name PRINT 'LIST 2:' PRINT '-------' SELECT substring(object_name(constid), 1, 40) AS FK, substring(object_name(fkeyid), 1, 30) AS "Referencing Table", substring(object_name(rkeyid), 1, 30) AS "Referenced Table" FROM sysreferences ORDER BY object_name(fkeyid) PRINT '-- -----------------------------------------------------' PRINT '-- CHECK 5: LIST OF COLUMNS AND DATATYPES:' PRINT '-- -----------------------------------------------------' PRINT 'LIST OF ALL COLUMNS AND DATATYPES:' PRINT '----------------------------------' SELECT substring(c.name, 1, 30) as "ColumName", c.xtype, substring(object_name(c.id),1,30) as "TableName", 
substring(t.name,1,30) as "DataType" FROM syscolumns c, systypes t WHERE c.xtype=t.xtype AND object_name(c.id) in (SELECT name FROM sysobjects WHERE xtype='U') AND t.name not like '%sysname%' ORDER BY object_name(c.id) PRINT '-- -----------------------------------------------------' PRINT '-- CHECK 6: LIST OF DATABASE OPTIONS AND COLLATION:' PRINT '-- -----------------------------------------------------' PRINT 'DATABASE OPTIONS:' PRINT '-----------------' exec sp_dboption PRINT 'DATABASE COLLATION:' PRINT '-------------------' exec sp_helpsort PRINT 'END OF LISTING.' -- END OF FILE ---------------------------------------------------------------------- 22. SOME BACKUP SCRIPTS ---------------------------------------------------------------------- SCRIPT 1: ========= SET QUOTED_IDENTIFIER OFF GO SET ANSI_NULLS ON GO create procedure usp_build_restore_script as -- -- This stored procedure was written by Greg Larsen for Washington State Department of Health. -- Date: 12/16/2001 -- -- Description: -- This stored procedure generates TSQL script that will restore all the databases -- on the current SQL Server. This stored procedure takes into account when the last -- full and differential backups where taken, and how many transaction log backups -- have been taken since the last database backup, based on the information in -- the msdb database. -- -- Modified: -- -- -- Declare variables used in SP declare @cmd nvarchar (1000) declare @cmd1 nvarchar (1000) declare @db nvarchar(128) declare @filename nvarchar(128) declare @cnt int declare @num_processed int declare @name nvarchar(128) declare @physical_device_name nvarchar(128) declare @backup_start_date datetime declare @type char(1) -- Turn off the row number message set nocount on -- SECTION 1 ---------------------------------------------- -- Define cursor to hold all the different databases for the restore script will be built declare db cursor for select name from master..sysdatabases where name not in ('tempdb') -- Create a global temporary table that will hold the name of the backup, the database name, and the type of database backup. create table ##backupnames ( name nvarchar(100), database_name nvarchar(100), type char(1) ) -- Open cursor containing list of database names. 
open db fetch next from db into @db -- Process until no more databases are left WHILE @@FETCH_STATUS = 0 BEGIN -- Subsection 1A -------------------------------------------- -- initialize the physical device name set @physical_device_name = '' -- get the name of the last full database backup select @physical_device_name = physical_device_name , @backup_start_date = backup_start_date from msdb..backupset a join msdb..backupmediaset b on a.media_set_id = b.media_set_id join msdb..backupmediafamily c on a.media_set_id = c.media_set_id where type='d' and backup_start_date = (select top 1 backup_start_date from msdb..backupset where @db = database_name and type = 'd' order by backup_start_date desc) -- Did a full database backup name get found if @physical_device_name <> '' begin -- Build command to place a record in table that holds backup names select @cmd = 'insert into ##backupnames values (' + char(39) + @physical_device_name + char(39) + ',' + char(39) + @db + char(39) + ',' + char(39) + 'd' + char(39)+ ')' -- Execute command to place a record in table that holds backup names exec sp_executesql @cmd end -- Subsection 1B -------------------------------------------- -- Reset the physical device name set @physical_device_name = '' -- Find the last differential database backup select @physical_device_name = physical_device_name, @backup_start_date = backup_start_date from msdb..backupset a join msdb..backupmediaset b on a.media_set_id = b.media_set_id join msdb..backupmediafamily c on a.media_set_id = c.media_set_id where type='i' and backup_start_date = (select top 1 backup_start_date from msdb..backupset where @db = database_name and type = 'I' and backup_start_date > @backup_start_date order by backup_start_date desc) -- Did a differential backup name get found if @physical_device_name <> '' begin -- Build command to place a record in table that holds backup names select @cmd = 'insert into ##backupnames values (' + char(39) + @physical_device_name + char(39) + ',' + char(39) + @db + char(39) + ',' + char(39) + 'i' + char(39)+ ')' -- Execute command to place a record in table that holds backup names exec sp_executesql @cmd end -- Subsection 1C -------------------------------------------- -- Build command to place records in table to hold backup names for all -- transaction log backups from the last database backup set @CMD = 'insert into ##backupnames select physical_device_name,' + char(39) + @db + char(39) + ',' + char(39) + 'l' + char(39) + 'from msdb..backupset a join msdb..backupmediaset b on a.media_set_id = b.media_set_id join ' + 'msdb..backupmediafamily c on a.media_set_id = c.media_set_id ' + 'where type=' + char(39) + 'l' + char(39) + 'and backup_start_date > @backup_start_dat and' + char(39) + @db + char(39) + ' = database_name order by backup_start_date' -- Execute command to place records in table to hold backup names -- for all transaction log backups from the last database backup exec sp_executesql @cmd,@params=N'@backup_start_dat datetime', @backup_start_dat = @backup_start_date -- get next database to process fetch next from db into @db end -- close close db -- Section B ---------------------------------------------- open db -- Get first recod from database list cursor fetch next from db into @db -- Generate Heading in Restore script print '-- Restore All databases' -- Process all databases WHILE @@FETCH_STATUS = 0 BEGIN -- define cursor for all database and log backups for specific database being processed declare backup_name cursor for select name,type from ##backupnames 
where database_name = @DB

-- Open cursor containing list of database backups for specific database being processed
open backup_name

-- Determine the number of different backups available for specific database being processed
select @CNT = count(*) from ##backupnames where database_name = @DB

-- Get first database backup for specific database being processed
fetch next from backup_name into @physical_device_name, @type

-- Set counter to track the number of backups processed
set @NUM_PROCESSED = 0

-- Process until no more database backups exist for specific database being processed
WHILE @@FETCH_STATUS = 0
BEGIN
  -- Increment the counter to track the number of backups processed
  set @NUM_PROCESSED = @NUM_PROCESSED + 1

  -- Is the number of database backups processed the same as the number of different backups
  -- available for specific database being processed?
  if @CNT = @NUM_PROCESSED
     -- If so, is the type of backup currently being processed a transaction log backup?
     if @TYPE = 'l'
        -- build restore command to restore the last transaction log
        select @cmd = 'restore log ' + rtrim(@db) + char(13) +
               ' from disk = ' + char(39) +
               rtrim(substring(@physical_device_name,1,len(@physical_device_name))) + char(39) + char(13) +
               ' with replace'
     else
        -- Last backup was not a transaction log backup
        -- Build restore command to restore the last database backup
        select @cmd = 'restore database ' + rtrim(@db) + char(13) +
               ' from disk = ' + char(39) +
               rtrim(substring(@physical_device_name,1,len(@physical_device_name))) + char(39) + char(13) +
               ' with replace'
  else
     -- Current backup is not the last backup
     -- Is the current backup being processed a transaction log backup?
     if @TYPE = 'l'
        -- Build restore command to restore the current transaction backup, with no recovery
        select @cmd = 'restore log ' + rtrim(@db) + char(13) +
               ' from disk = ' + char(39) +
               rtrim(substring(@physical_device_name,1,len(@physical_device_name))) + char(39) + char(13) +
               ' with replace, norecovery'
     else
        -- Current backup being processed is not a transaction log backup
        -- Build restore command to restore the current database backup, with no recovery
        select @cmd = 'restore database ' + rtrim(@db) + char(13) +
               ' from disk = ' + char(39) +
               rtrim(substring(@physical_device_name,1,len(@physical_device_name))) + char(39) + char(13) +
               ' with replace, norecovery'

  -- if it is master comment line out
  if @db = 'master'
     set @cmd = '/* ' + char(13) + @cmd + char(13) + '*/'

  -- Generate the restore command and other commands for restore script
  print @cmd
  print 'go'
  print ' '

  -- Get next database backup to process
  fetch next from backup_name into @physical_device_name, @type
end

-- Close and deallocate database backup name cursor for current database being processed
close backup_name
deallocate backup_name

-- Get next database to process
fetch next from db into @db
end

-- Close and deallocate cursor containing list of databases to process
close db
deallocate db

-- Drop global temporary table
drop table ##backupnames
GO
SET QUOTED_IDENTIFIER OFF
GO
SET ANSI_NULLS ON
GO
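Once the procedure has been created, it simply PRINTs the generated restore script, so you capture its output. A minimal usage sketch (the server name, output path and osql switches below are only illustrative, not part of the original script):

-- From Query Analyzer: set the output to "Results in Text" and run
EXEC usp_build_restore_script

-- Or from the command line, writing the generated restore script to a file:
-- osql -E -S MYSERVER -d master -Q "EXEC usp_build_restore_script" -n -w 1000 -o c:\restore_all_databases.sql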
SCRIPT 2:
=========

SET NOCOUNT ON
DECLARE @NAME VARCHAR(128)
DECLARE @DATUM DATETIME
DECLARE @BACKUP_DATUM VARCHAR(128)

SELECT @DATUM=GETDATE()
SELECT @BACKUP_DATUM=CONVERT(VARCHAR(10),@DATUM,20)

-- NOW FETCH THE DATABASE NAMES FROM THE DICTIONARY
DECLARE c1 CURSOR FOR
SELECT name FROM master.dbo.sysdatabases WHERE name not like '%AIDA%'

OPEN c1
FETCH NEXT FROM c1 INTO @NAME
WHILE (@@fetch_status<>-1)
BEGIN
  PRINT 'BACKUP DATABASE '+@NAME+' TO DISK=''d:\backup\local_dbs\'+@NAME+'_'+@BACKUP_DATUM+'.dmp'''
  PRINT 'GO'
  FETCH NEXT FROM c1 INTO @NAME
END
CLOSE c1
DEALLOCATE c1

SCRIPT 3:
=========

USE msdb
DECLARE @v1 VARCHAR(30)
DECLARE @v2 VARCHAR(30)
SELECT @v1=max(backup_finish_date) FROM backupset
SELECT @v2=getdate()
IF (SELECT DATEDIFF(day, @v1, @v2)) in (0, 1)
BEGIN
  BACKUP LOG sales TO sales_log_dump WITH INIT
END

select MAX(backup_finish_date) from msdb.dbo.backupset
where database_name like 'SharePoint%'

SCRIPT 4:
=========

-- ---------------------------------------
-- JOB: BACKUP_ALL_DATABASES (Once a week)
--
-- Version : 1.2
-- Date    : 27-12-2004
-- ---------------------------------------

-- Step 1: removal of backup files older than 60 days
-- --------------------------------------------------
DECLARE @fname VARCHAR(128)
DECLARE @delstring VARCHAR(128)
DECLARE @checkfile VARCHAR(128)
DECLARE @result INT
DECLARE @ftrue INT

DECLARE c_1 CURSOR FOR
SELECT f.physical_device_name
FROM msdb.dbo.backupset s, msdb.dbo.backupmediafamily f
WHERE s.media_set_id=f.media_set_id
AND s.backup_start_date > getdate()-90
AND s.backup_start_date < getdate()-60

OPEN c_1
FETCH NEXT FROM c_1 INTO @fname
WHILE (@@fetch_status<>-1)
BEGIN
  SELECT @delstring='del '+@fname
  SELECT @checkfile='dir '+@fname
  EXEC @ftrue=master.dbo.xp_cmdshell @checkfile
  IF (@ftrue=0)
    EXEC @RESULT = master.dbo.xp_cmdshell @delstring
  IF (@RESULT <> 0)
  BEGIN
    RAISERROR ('Backup_all_databases: One or more old backupdumps not found.', 16, 1) WITH LOG
  END
  FETCH NEXT FROM c_1 INTO @fname
END
CLOSE c_1
DEALLOCATE c_1

-- Step 2: backup databases
-- ------------------------
SET NOCOUNT ON
DECLARE @NAME VARCHAR(128)
DECLARE @DATUM DATETIME
DECLARE @BACKUP_DATUM VARCHAR(128)

SELECT @DATUM=GETDATE()
SELECT @BACKUP_DATUM=CONVERT(VARCHAR(10),@DATUM,20)

-- NOW FETCH THE DATABASE NAMES FROM THE DICTIONARY
DECLARE c1 CURSOR FOR
SELECT name from master.dbo.sysdatabases
where name not like '%AIDA%'
AND name not in ('master','model','tempdb','pubs','northwind','msdb')
AND name not like 'SharePointPortal_Site%'
AND name not like 'ProjectServer%'
AND status < 100

OPEN c1
FETCH NEXT FROM c1 INTO @NAME
WHILE (@@fetch_status<>-1)
BEGIN
  EXEC('BACKUP DATABASE '+@NAME+' TO DISK=''d:\backup\'+@NAME+'_'+@BACKUP_DATUM+'.dmp'''+' WITH INIT')
  FETCH NEXT FROM c1 INTO @NAME
END
CLOSE c1
DEALLOCATE c1

----------------------------------------------------------------------
23. DBCC COMMANDS:
----------------------------------------------------------------------

1. DBCC SHOWCONTIG
==================

Understanding SQL Server's DBCC SHOWCONTIG

Probably one of the most significant performance problems found in databases is centered around table data fragmentation. One situation that may be analogous to table fragmentation might be an index at the end of a large book. A single index entry in such a book might point to several pages scattered throughout the book. You must then scan each page for the specific information you require. This differs significantly from the index of the phone book, which stores its data in sorted order. A typical query for the name "Jones" might span multiple consecutive pages, but the entries are always held in sorted order. In the case of a database, we start out with the data looking more like a phone book, and end with the data looking more like a history book. Therefore, we need to occasionally resort the data in an effort to recreate the phone book order.

Below, you will see a graphical presentation of how SQL Server lays out the data so that we can discuss the actual findings more clearly.

|====|====|====..      ..|====|   the whole picture is one 64KB extent
|====|====|====..      ..|====|
|====|====|====..      ..|====|
|====|====|====..
..|====| |====|====|====.. ..|====| page A Quick SQL Server Internals Discussion We are most familiar with the data row. The row size is set only by the definition of the table that holds it (e.g. A table of addresses require more data per row then a table of class names). In SQL Server, a table may define a row as storing as little as 4 bytes to as much as 8060.This limit is set by the size of the data page, which stores up to 8,192 bytes (8 KB). The remaining 132 bytes are used by SQL Server to track other information under the covers. Although SQL Server is designed around 8 KB pages, the smallest unit of data that SQL Server can allocate is 64 KB. This is called an extent. To store the data in a sorted order, as in a phone book, SQL Server uses something called a clustered index. When a typical database is created, clustered indexes exist on nearly all tables. However, just because the data exists in sorted order within the page does not mean that it exists as such within an extent. The reason for this derives from situations in which there is no more room on a given page in which it can insert a row. SQL Server then removes approximately half the page and moves it to another page, which is called a Page Split (Page Splits will not occur with clustered indexes on IDENTITY based columns, but hotspotting may). In some cases, it may move that data to another extent altogether, possibly even allocating a new extent to do so. So, while we start off with names beginning with A and ending with H on one page, and names beginning with I and ending with Z on the next page, through usage, we may see that names A through C are now located on one page in one extent, D through E on another extent and S through Z back on the fifth page of the first extent, etc. It is because of the page split that there are times in which we may prefer to use tables with no clustered indexes at all. However, these tables are usually scratch tables which are highly volatile. In those situations, we desire the quicker write times at the cost of slower reads. Calling DBCC SHOWCONTIG Using Query Analyzer, connect to the database you wish to view. Next, you will need to get the object id of the table(s) you wish to examine. I have simplified this task to retrieve the top 10 tables by size using the following script. SELECT TOP 10 'DBCC SHOWCONTIG(' + CAST(id AS NVARCHAR(20)) + ')' + CHAR(10) + 'PRINT '' ''' + CHAR(10) FROM sysindexes WHERE indid = 1 or indid = 0 ORDER BY rows DESC Execute this script in the database that you wish to check, and you will get an output resembling (repeated 10 times, once for each of the 10 largest tables): DBCC SHOWCONTIG(123456789) PRINT '' Copy and paste the complete resultset into your query window and execute it. The Results Explained The results from the previous command will look something like the following: DBCC SHOWCONTIG scanning 'MyTable1' table... Table: 'MyTable1' (1556968673); index ID: 1, database ID: 16 TABLE level scan performed. - Pages Scanned................................: 18986 - Extents Scanned..............................: 2443 - Extent Switches..............................: 9238 - Avg. Pages per Extent........................: 7.8 - Scan Density [Best Count:Actual Count].......: 25.70% [2374:9239] - Logical Scan Fragmentation ..................: 44.58% - Extent Scan Fragmentation ...................: 87.07% - Avg. Bytes Free per Page.....................: 1658.7 - Avg. Page Density (full).....................: 79.51% DBCC execution completed. 
If DBCC printed error messages, contact your system administrator.

DBCC SHOWCONTIG scanning 'MyTable2' table...
Table: 'MyTable2' (183984032); index ID: 1, database ID: 16
TABLE level scan performed.
- Pages Scanned................................: 28980
- Extents Scanned..............................: 3687
- Extent Switches..............................: 22565
- Avg. Pages per Extent........................: 7.9
- Scan Density [Best Count:Actual Count].......: 16.06% [3623:22566]
- Logical Scan Fragmentation ..................: 83.05%
- Extent Scan Fragmentation ...................: 87.44%
- Avg. Bytes Free per Page.....................: 3151.1
- Avg. Page Density (full).....................: 61.07%
DBCC execution completed. If DBCC printed error messages, contact your system administrator.

In the first table, MyTable1, we see that there were 18,986 pages examined to create the report. Those pages existed within 2,443 extents, indicating that the table consumed approximately 97% (7.8 pages per extent on average) of the extents allocated for it. We then see that while examining the pages for fragmentation, the server had to switch extent locations 9,238 times. The Scan Density restates this by indicating the percentage of all pages within all extents that were contiguous. In an ideal environment, the density displayed would be close to 100.

The Logical Scan Fragmentation and Extent Scan Fragmentation are indications of how well the indexes are stored within the system when a clustered index is present (and should be ignored for tables that do not have a clustered index). In both cases, a number close to 0 is preferable. There is another anomaly being displayed here that is a little difficult to explain, but it is that SQL Server allows multiple tables to exist within a single extent, which further explains the 7.8 pages per extent (multiple tables may not, however, exist within a page).

The next items discuss a somewhat more mundane but important issue of page utilization. Again using the first table as the example, there are an average of 1659 bytes free per page, meaning that each page is 79.51% utilized. The closer that number gets to 100, the faster the database is able to read in records, since more records exist on a single page. However, this must be balanced with the cost of writing to the table. Since a page split will occur if a write is required on a page that is full, the overhead can be tremendous. This is exaggerated when using RAID 5 disk subsystems, since RAID 5 has a considerably slower write time compared to its read time. To account for this, we have the ability to tell SQL Server to leave each page a certain percentage full.

DBCC DBREINDEX is a related tool that will reorganize your database information in much the same way Norton Defrag will work on your hard drive (see Books Online for information on how to use DBCC DBREINDEX). The following report displays the differences in the data after we defragmented the data using DBCC DBREINDEX.

DBCC SHOWCONTIG scanning 'MyTable1' table...
Table: 'MyTable1' (1556968673); index ID: 1, database ID: 16
TABLE level scan performed.
- Pages Scanned................................: 15492
- Extents Scanned..............................: 1945
- Extent Switches..............................: 2363
- Avg. Pages per Extent........................: 8.0
- Scan Density [Best Count:Actual Count].......: 81.94% [1937:2364]
- Logical Scan Fragmentation ..................: 15.43%
- Extent Scan Fragmentation ...................: 20.15%
- Avg.
Bytes Free per Page.....................: 159.8 - Avg. Page Density (full).....................: 98.03% DBCC execution completed. If DBCC printed error messages, contact your system administrator. DBCC SHOWCONTIG scanning 'MyTable2' table... Table: 'MyTable2' (183984032); index ID: 1, database ID: 16 TABLE level scan performed. - Pages Scanned................................: 35270 - Extents Scanned..............................: 4415 - Extent Switches..............................: 4437 - Avg. Pages per Extent........................: 8.0 - Scan Density [Best Count:Actual Count].......: 99.35% [4409:4438] - Logical Scan Fragmentation ..................: 0.11% - Extent Scan Fragmentation ...................: 0.66% - Avg. Bytes Free per Page.....................: 3940.1 - Avg. Page Density (full).....................: 51.32% DBCC execution completed. If DBCC printed error messages, contact your system administrator. Here, we can see several key improvements and some examples of how proper indexing can be very important. The most glaring items for us are how well we were able to increase the scan density. Again, using the MyTable1 table as a reference, we can see that out of 1,945 extents, there were only 2363 extent switches. Notice that the number of extent switches is now a lower number than the original number of extents. This is due to the more efficient allocation of the data. And, since there is a significant reduction of the number of extent switches, searches for large quantities of contiguous data will be fulfilled much more quickly. These reports were taken after only a small amount of processing had occurred on this system, yet already we can see that there has been a fair amount of fragmentation of the data. The table MyTable1 has already begun to show signs of performance degradation. When there is an unusually large amount of new data being inserted into the tables, these numbers will quickly begin to resemble the those that we see in the previous report. In the table MyTable2, we see a stark difference from MyTable1.This is because of some index tuning that I had done on that table. As I said earlier, SQL Server uses the clustered indexes in order to understand how data should be ordered. To prevent page splits, I had SQL Server leave each page only 50% full. This allows for multiple inserts to occur without generating page splits, allowing our scan density to remain high for a longer period of time. But this also comes at the cost of reducing the quantity of contiguous records on each page and doubles the amount of space consumed by the table, hence the now much larger number of pages and extents scanned. Conclusion From examining the output of DBCC SHOWCONTIG, we were able to locate several key issues. First, we saw that our database was heavily fragmented, and required defragmentation using DBCC DBREINDEX. Next, we were able to tell what percentage of the allocated pages were actually being used by SQL Server. Finally, we saw that by modifying the fillfactor on an index, we had a tremendous affect on page splitting at the cost of more page I/O for each read. The following SQL Server DBCC commands, some documented and some not documented, can come in handy when you are trying to optimize your SQL Servers. DBCC CACHESTATS: Displays information about the object currently in the buffer cache, such as hit rates, compiled objects and plans, etc. Note in the sample results below that each of these SQL Server objects can be cached in the buffer cache of SQL Server. 
Example: DBCC CACHESTATS Sample Results (abbreviated): Object Name Hit Ratio ------------ ------------- Proc 0.86420054765378507 Prepared 0.99988494930394334 Adhoc 0.93237136647793051 ReplProc 0.0 Trigger 0.99843452831887947 Cursor 0.42319205924058612 Exec Cxt 0.65279111666076906 View 0.95740334726893905 Default 0.60895011346896522 UsrTab 0.94985969576133511 SysTab 0.0 Check 0.67021276595744683 Rule 0.0 Summary 0.80056155581812771 Here's what some of the key statistics from this command mean: Hit Ratio: Displays the percentage of time that this particular object was found in SQL Server's cache. The bigger this number, the better. Object Count: Displays the total number of objects of the specified type that are cached. Avg. Cost: A value used by SQL Server that measures how long it takes to compile a plan, along with the amount of memory needed by the plan. This value is used by SQL Server to determine if the plan should be cached or not. Avg. Pages: Measures the total number of 8K pages used, on average, for cached objects. LW Object Count, LW Avg Cost, WL Avg Stay, LW Ave Use: All these columns indicate how many of the specified objects have been removed from the cache by the Lazy Writer. The lower the figure, the better. * * * * * DBCC DROPCLEANBUFFERS: Use this command to remove all the test data from SQL Server's data cache (buffer) between tests to ensure fair testing. Keep in mind that this command only removes clean buffers, not dirty buffers. Because of this, before running the DBCC DROPCLEANBUFFERS command, you may first want to run the CHECKPOINT command first. Running CHECKPOINT will write all dirty buffers to disk. And then when you run DBCC DROPCLEANBUFFERS, you can be assured that all data buffers are cleaned out, not just the clean ones. Example: DBCC DROPCLEANBUFFERS * * * * * DBCC ERRORLOG: If you rarely restart the mssqlserver service, you may find that your server log gets very large and takes a long time to load and view. You can truncate (essentially create a new log) the Current Server log by running DBCC ERRORLOG. You might want to consider scheduling a regular job that runs this command once a week to automatically truncate the server log. As a rule, I do this for all of my SQL Servers. Also, you can accomplish the same thing using this stored procedure: sp_cycle_errorlog. Example: DBCC ERRORLOG * * * * * DBCC FLUSHPROCINDB: Used to clear out the stored procedure cache for a specific database on a SQL Server, not the entire SQL Server. The database ID number to be affected must be entered as part of the command. You may want to use this command before testing to ensure that previous stored procedure plans won't negatively affect testing results. Example: DECLARE @intDBID INTEGER SET @intDBID = (SELECT dbid FROM master.dbo.sysdatabases WHERE name = 'database_name') DBCC FLUSHPROCINDB (@intDBID) * * * * * DBCC INDEXDEFRAG: In SQL Server 2000, Microsoft introduced DBCC INDEXDEFRAG to help reduce logical disk fragmentation. When this command runs, it reduces fragmentation and does not lock the table, allowing other users to access the table when the defragmentation process is running. Unfortunately, this command doesn't do a great job of logical defragmentation. The only way to truly reduce logical fragmentation is to rebuild your table's indexes. While this will reduce all fragmentation, unfortunately it will lock the table, preventing users from accessing it during this process. 
This means that you will need to find a time when this will not present a problem to your users. Of course, if you are unable to find a time to reindex your indexes, then running DBCC INDEXDEFRAG is better than doing nothing. Example: DBCC INDEXDEFRAG (Database_Name, Table_Name, Index_Name) * * * * * DBCC FREEPROCCACHE: Used to clear out the stored procedure cache for all SQL Server databases. You may want to use this command before testing to ensure that previous stored procedure plans won't negatively affect testing results. Example: DBCC FREEPROCCACHE * * * * * DBCC MEMORYSTATUS: Lists a breakdown of how the SQL Server buffer cache is divided up, including buffer activity. Undocumented command, and one that may be dropped in future versions of SQL Server. Example: DBCC MEMORYSTATUS * * * * * DBCC OPENTRAN: An open transaction can leave locks open, preventing others from accessing the data they need in a database. This command is used to identify the oldest open transaction in a specific database. Example: DBCC OPENTRAN('database_name') * * * * * DBCC PAGE: Use this command to look at contents of a data page stored in SQL Server. Example: DBCC PAGE ({dbid|dbname}, pagenum [,print option] [,cache] [,logical]) where: Dbid or dbname: Enter either the dbid or the name of the database in question. Pagenum: Enter the page number of the SQL Server page that is to be examined. Print option: (Optional) Print option can be either 0, 1, or 2. 0 - (Default) This option causes DBCC PAGE to print out only the page header information. 1 - This option causes DBCC PAGE to print out the page header information, each row of information from the page, and the page's offset table. Each of the rows printed out will be separated from each other. 2 - This option is the same as option 1, except it prints the page rows as a single block of information rather than separating the individual rows. The offset and header will also be displayed. Cache: (Optional) This parameter allows either a 1 or a 0 to be entered. 0 - This option causes DBCC PAGE to retrieve the page number from disk rather than checking to see if it is in cache. 1 - (Default) This option takes the page from cache if it is in cache rather than getting it from disk only. Logical: (Optional) This parameter is for use if the page number that is to be retrieved is a virtual page rather then a logical page. It can be either 0 or 1. 0 - If the page is to be a virtual page number. 1 - (Default) If the page is the logical page number. * * * * * DBCC PINTABLE & DBCC UNPINTABLE: By default, SQL Server automatically brings into its data cache the pages it needs to work with. These data pages will stay in the data cache until there is no room for them, and assuming they are not needed, these pages will be flushed out of the data cache onto disk. At some point in the future when SQL Server needs these data pages again, it will have to go to disk in order to read them again into the data cache for use. If SQL Server somehow had the ability to keep the data pages in the data cache all the time, then SQL Server's performance would be increased because I/O could be significantly reduced on the server. The process of "pinning a table" is a way to tell SQL Server that we don't want it to flush out data pages for specific named tables once they are read in in the first place. 
This in effect keeps these database pages in the data cache all the time, which eliminates the process of SQL Server from having to read the data pages, flush them out, and reread them again when the time arrives. As you can imagine, this can reduce I/O for these pinned tables, boosting SQL Server's performance. To pin a table, the command DBCC PINTABLE is used. For example, the script below can be run to pin a table in SQL Server: DECLARE @db_id int, @tbl_id int USE Northwind SET @db_id = DB_ID('Northwind') SET @tbl_id = OBJECT_ID('Northwind..categories') DBCC PINTABLE (@db_id, @tbl_id) While you can use the DBCC PINTABLE directly, without the rest of the above script, you will find the script handy because the DBCC PINTABLE's parameters refer to the database and table ID that you want to pin, not by their database and table name. This script makes it a little easier to pin a table. You must run this command for every table you want to pin. Once a table is pinned in the data cache, this does not mean that the entire table is automatically loaded into the data cache. It only means that as data pages from that table are needed by SQL Server, they are loaded into the data cache, and then stay there, not ever being flushed out to disk until you give the command to unpin the table using the DBCC UNPINTABLE. It is possible that part of a table, and not all of it, will be all that is pinned. When you are done with a table and you no longer want it pinned, you will want to unpin your table. To do so, run this example code: DECLARE @db_id int, @tbl_id int USE Northwind SET @db_id = DB_ID('Northwind') SET @tbl_id = OBJECT_ID('Northwind..categories') DBCC UNPINTABLE (@db_id, @tbl_id) * * * * * DBCC PROCCACHE: Displays information about how the stored procedure cache is being used. Example: DBCC PROCCACHE * * * * * DBCC REINDEX: Periodically (weekly or monthly) perform a database reorganization on all the indexes on all the tables in your database. This will rebuild the indexes so that the data is no longer fragmented. Fragmented data can cause SQL Server to perform unnecessary data reads, slowing down SQL Server's performance. If you do a reorganization on a table with a clustered index, any non-clustered indexes on that same table will automatically be rebuilt. Database reorganizations can be done scheduling SQLMAINT.EXE to run using the SQL Server Agent, or if by running your own custom script via the SQL Server Agent (see below). Unfortunately, the DBCC DBREINDEX command will not automatically rebuild all of the indexes on all the tables in a database, it can only work on one table at a time. But if you run the following script, you can index all the tables in a database with ease. Example: DBCC DBREINDEX('table_name', fillfactor) or --Script to automatically reindex all tables in a database USE DatabaseName --Enter the name of the database you want to reindex DECLARE @TableName varchar(255) DECLARE TableCursor CURSOR FOR SELECT table_name FROM information_schema.tables WHERE table_type = 'base table' OPEN TableCursor FETCH NEXT FROM TableCursor INTO @TableName WHILE @@FETCH_STATUS = 0 BEGIN PRINT "Reindexing " + @TableName DBCC DBREINDEX(@TableName,' ',90) FETCH NEXT FROM TableCursor INTO @TableName END CLOSE TableCursor DEALLOCATE TableCursor The script will automatically reindex every index in every table of any database you select, and provide a fillfactor of 90%. You can substitute any number you want for the 90 in the above script. 
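If you only need to rebuild one index, or want to choose the fillfactor per index rather than using 90 for everything, DBCC DBREINDEX also accepts an index name. The pubs table and index below are just an illustration:

USE pubs
-- Rebuild only the clustered primary-key index of the authors table, leaving pages 80% full
DBCC DBREINDEX ('authors', 'UPKCL_auidind', 80)

-- An empty index name rebuilds all indexes on the table; a fillfactor of 0 keeps each index's original fillfactor
DBCC DBREINDEX ('authors', '', 0)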
When DBCC DBREINDEX is used to rebuild indexes, keep in mind that as the indexes on a table are being rebuilt, that the table becomes unavailable for use by your users. For example, when a non-clustered index is rebuilt, a shared table lock is put on the table, preventing all but SELECT operations to be performed on it. When a clustered index is rebuilt, an exclusive table lock is put on the table, preventing any table access by your users. Because of this, you should only run this command when users don't need access to the tables being reorganized. * * * * * DBCC SHOWCONTIG: Used to show how fragmented data and indexes are in a specified table. If data pages storing data or index information becomes fragmented, it takes more disk I/O to find and move the data to the SQL Server cache buffer, hurting performance. This command tells you how fragmented these data pages are. If you find that fragmentation is a problem, you can reindex the tables to eliminate the fragmentation. Note, this fragmentation is fragmentation of data pages within the SQL Server MDB file, not of the physical file itself. Since this command requires you to know the ID of both the table and index being analyzed, you may want to run the following script so you don't have to manually look up the table name ID number and the index ID number. Example: DBCC SHOWCONTIG (Table_id, IndexID) or --Script to identify table fragmentation --Declare variables DECLARE @ID int, @IndexID int, @IndexName varchar(128) --Set the table and index to be examined SELECT @IndexName = 'index_name' --enter name of index SET @ID = OBJECT_ID('table_name') --enter name of table --Get the Index Values SELECT @IndexID = IndID FROM sysindexes WHERE id = @ID AND name = @IndexName --Display the fragmentation DBCC SHOWCONTIG (@id, @IndexID) While the DBCC SHOWCONTIG command provides several measurements, the key one is Scan Density. This figure should be as close to 100% as possible. If the scan density is less than 75%, then you may want to reindex the tables in your database. * * * * * DBCC SHOW_STATISTICS: Used to find out the selectivity of an index. Generally speaking, the higher higher the selectivity of an index, the greater the likelihood it will be used by the query optimizer. You have to specify both the table name and the index name you want to find the statistics on. Example: DBCC SHOW_STATISTICS (table_name, index_name) * * * * * DBCC SQLMGRSTATS: Used to produce three different values that can sometimes be useful when you want to find out how well caching is being performed on ad-hoc and prepared Transact-SQL statements. Example: DBCC SQLMGRSTATS Sample Results: Item Status ------------------------- ----------- Memory Used (8k Pages) 5446 Number CSql Objects 29098 Number False Hits 425490 Here's what the above means: Memory Used (8k Pages): If the amount of memory pages is very large, this may be an indication that some user connection is preparing many Transact-SQL statements, but it not un-preparing them. Number CSql Objects: Measures the total number of cached Transact-SQL statements. Number False Hits: Sometimes, false hits occur when SQL Server goes to match pre-existing cached Transact-SQL statements. Ideally, this figure should be as low as possible. * * * * * DBCC SQLPERF(): This command includes both documented and undocumented options. Let's take a look at all of them and see what they do. 
DBCC SQLPERF (LOGSPACE) This option (documented) returns data about the transaction log for all of the databases on the SQL Server, including Database Name, Log Size (MB), Log Space Used (%), and Status. DBCC SQLPERF (UMSSTATS) This option (undocumented) returns data about SQL Server thread management. DBCC SQLPERF (WAITSTATS) This option (undocumented) returns data about wait types for SQL Server resources. DBCC SQLPERF (IOSTATS) This option (undocumented) returns data about outstanding SQL Server reads and writes. DBCC SQLPERF (RASTATS) This option (undocumented) returns data about SQL Server read-ahead activity. DBCC SQLPERF (THREADS) This option (undocumented) returns data about I/O, CPU, and memory usage per SQL Server thread. * * * * * DBCC SQLPERF (UMSSTATS): When you run this command, you get output like this. (Note, this example was run on a 4 CPU server. There is 1 Scheduler ID per available CPU.) Statistic Value -------------------------------- ------------------------ Scheduler ID 0.0 num users 18.0 num runnable 0.0 num workers 13.0 idle workers 11.0 work queued 0.0 cntxt switches 2.2994396E+7 cntxt switches(idle) 1.7793976E+7 Scheduler ID 1.0 num users 15.0 num runnable 0.0 num workers 13.0 idle workers 10.0 work queued 0.0 cntxt switches 2.4836728E+7 cntxt switches(idle) 1.6275707E+7 Scheduler ID 2.0 num users 17.0 num runnable 0.0 num workers 12.0 idle workers 11.0 work queued 0.0 cntxt switches 1.1331447E+7 cntxt switches(idle) 1.6273097E+7 Scheduler ID 3.0 num users 16.0 num runnable 0.0 num workers 12.0 idle workers 11.0 work queued 0.0 cntxt switches 1.1110251E+7 cntxt switches(idle) 1.624729E+7 Scheduler Switches 0.0 Total Work 3.1632352E+7 Below is an explanation of some of the key statistics above: num users: This is the number of SQL Server threads currently in the scheduler. num runnable: This is the number of actual SQL Server threads that are runnable num workers: This is the actual number of worker there are to process threads. This is the size of the thread pool. idle workers: The number of workers that are currently idle. cntxt switches: The number of context switches between runnable threads. cntxt switches (idle): The number of context switches to the idle thread. * * * * * DBCC TRACEON & DBCC TRACEOFF: Used to turn on and off trace flags. Trace flags are often used to temporarily turn on and off specific server behavior or server characteristics. In rare occasions, they can be useful to troubleshooting SQL Server performance problems. Example: To use the DBCC TRACEON command to turn on a specified trace flag, use this syntax: DBCC TRACEON (trace# [,...n]) To use the DBCC TRACEON command to turn off a specified trace flag, use this syntax: DBCC TRACEOFF (trace# [,...n]) You can also use the DBCC TRACESTATUS command to find out which trace flags are currently turned on in your server using this syntax: DBCC TRACESTATUS (trace# [,...n]) For specific information on the different kinds of trace flags available, search this website or look them up in Books Online. [6.5, 7.0, 2000] More information on SQL Server 7.0 trace flags, and more info on SQL Server 2000 trace flags. * * * * * DBCC UPDATEUSAGE: The official use for this command is to report and correct inaccuracies in the sysindexes table, which may result in incorrect space usage reports. Apparently, it can also fix the problem of unreclaimed data pages in SQL Server. You may want to consider running this command periodically to clean up potential problems. 
This command can take some time to run, and you want to run it during off times because it will negatively affect SQL Server's performance when running. When you run this command, you must specify the name of the database that you want affected. Example: DBCC UPDATEUSAGE ('databasename') ---------------------------------------------------------------------- 24. DOCUMENTS AND GRAPHICS: ---------------------------------------------------------------------- Usually, text, ntext, or image strings are large (a maximum of 2GB) character or binary strings stored outside a data row. The data row contains only a 16-byte text pointer that points to the root node of a tree built of internal pointers that map the pages in which the string fragments are stored. 24.1 Pointer 1: =============== Insert Image ------------ Using TextCopy utility , you can import image data into SQLServer. Syntax as follow : ---------------------------------------------------------------------------- TEXTCOPY Version 1.0 DB-Library version 8.00.100 Copies a single text or image value into or out of SQL Server. The value is a specified text or image 'column' of a single row (specified by the "where clause") of the specified 'table'. If the direction is IN (/I) then the data from the specified 'file' is copied into SQL Server, replacing the existing text or image value. If the direction is OUT (/O) then the text or image value is copied from SQL Server into the specified 'file', replacing any existing file. TEXTCOPY [/S [sqlserver]] [/U [login]] [/P [password]] [/D [database]] [/T table] [/C column] [/W"where clause"] [/F file] [{/I | /O}] [/K chunksize] [/Z] [/?] /S sqlserver The SQL Server to connect to. If 'sqlserver' is not specified, the local SQL Server is used. /U login The login to connect with. If 'login' is not specified, a trusted connection will be used. /P password The password for 'login'. If 'password' is not specified, a NULL password will be used. /D database The database that contains the table with the text or image data. If 'database' is not specified, the default database of 'login' is used. /T table The table that contains the text or image value. /C column The text or image column of 'table'. /W "where clause" A complete where clause (including the WHERE keyword) that specifies a single row of 'table'. /F file The file name. /I Copy text or image value into SQL Server from 'file'. /O Copy text or image value out of SQL Server into 'file'. /K chunksize Size of the data transfer buffer in bytes. Minimum value is 1024 bytes, default value is 4096 bytes. /Z Display debug information while running. /? Display this usage information and exit. ------------------------------------------------------------------------------------ -- create the table with image column.... insert the some value... then import image into the table using textcopy... then only you get the text or image pointer to update the image value..... Example : -- import table stru create table EmployeeImage ( EmployeeID int, Pic image default "image") insert EmployeeImage(EmployeeID) values (1) insert EmployeeImage(EmployeeID) values (2) You should also have some value in the Pic column. e.g. 0x0 is OK, otherwise textcopy gives an error. UPDATE EmployeeImage SET Pic=0x0 where employeeid=2 -- Fill the UserName,Password,ServerName and the Image file with full path like c:\employee1.bmp... 
textcopy -I -U -P -S -D Pubs -TEmployeeImage -C pic -W"where EmployeeID = 2" -F c:\Employee1.bmp textcopy -I -U -P -S -D Pubs -TEmployeeImage -C pic -W"where EmployeeID = 2" -F c:\Employee2.bmp -- Get Image Data from SQL Server and Displayed through Visual Basic The following example, uses Visual Basic 6.0 and ADO 2.5, to retreive Employee image from the table EmployeeImage. Assume that, Employee Image info is available for EmployeeID as 1.[using TextCopy] EmployeeID, Pic --------- ------------------------------- 1 ox1234.......................... create proc EmployeeSel @EmployeeID int as select Pic from EmployeeImage where EmployeeID = @EmployeeID Sample VB code -------------- In this code , you have to fill the values of Data Source,Initial Catalog,UserID,Password in the sServerDBConn string. Open the standard EXE project, add a Form and a Image control and name it as ImgEmployee. Use the code and you will the Picture. You cannot directly store the image column. Open a Binary file, write the image info and then load the image. you can retrieve the info through direct select/view/Stored Procedure. Here the Stored Procedure "EmployeeSel", retrive the Image of Employee [whose ID is 1]. -------------------------------------------------------------------------------------------- Dim objConn As New ADODB.Connection Dim objcmd As New ADODB.Command Dim objRec As ADODB.Recordset Dim bytImage() As Byte Dim sServerDBConn As String sServerDBConn = "Provider=SQLOLEDB; Data Source =SQLServerName; Initial Catalog = DatabaseName; User ID=UserName;Password= " objConn.Open sServerDBConn With objcmd .ActiveConnection = objConn .CommandText = "EmployeeSel" .CommandType = adCmdStoredProc .Parameters.Append .CreateParameter("EmployeeID", adInteger, adParamInput, 0, 1) Set objRec = objcmd.Execute End With bytImage = objRec("Pic") Open "c:\temp\EmpImage.bmp" For Binary As #1 Put #1, , bytImage() Close #1 imgEmployee.Picture = LoadPicture("c:\temp\EmpImage.bmp") ------------------------------------------------------------------------------------------------- It is a Simple one. For more info , refer SQL Server BOL,Visual Basic Documentation and ActiveX Data Objects Documentation. [GetChunk/AppendChunk methods]. For SQLServer 2000, more options available. Refer BOL. 24.2 Pointer 2: =============== '*************************************************************** '* Save/Retrieve Image Data From SQL Server DataBase Using '* ADOStream Objects. '*************************************************************** '* Code By: Michael P. Gerety '*************************************************************** 'Make sure you have a reference to ADODB v. 2.5 or later Dim rstRecordset As ADODB.Recordset Dim cnnConnection As ADODB.Connection Dim strStream As ADODB.Stream '*Setup: '*Create a form and place 3 command buttons named: '*cmdLoad, cmdSelectSave, and cmdClear '*Place a CommonDialog Control Named Dialog '*Place an ImageBox (or PictureBox) named Image1 '** The field type in Sql Server must be "Image" '** Everywhere you see "***" in the code is where you must enter '** your own data. Private Sub cmdClear_Click() Image1.Picture = Nothing End Sub Private Sub cmdLoad_Click() If Not LoadPictureFromDB(rstRecordset) Then MsgBox "Invalid Data Or No Picture In DB" End If End Sub Private Sub cmdSelectSave_Click() 'Open Dialog Box With dlgDialog .DialogTitle = "Open Image File...." 
.Filter = "Image Files (*.gif; *.bmp)| *.gif;*.bmp" .CancelError = True procReOpen: .ShowOpen If .FileName = "" Then MsgBox "Invalid filename or file not found.", _ vbOKOnly + vbExclamation, "Oops!" GoTo procReOpen Else If Not SavePictureToDB(rstRecordset, .FileName) Then MsgBox "Save was unsuccessful :(", vbOKOnly + _ vbExclamation, "Oops!" Exit Sub End If End If End With End Sub Private Sub Form_Load() Set cnnConnection = New ADODB.Connection Set rstRecordset = New ADODB.Recordset cnnConnection.Open ("Provider=SQLOLEDB; " & _ "data Source=**YourServer**;" & _ "Initial Catalog=**YourDatabase**; " & _ "User Id=**YourUID**;Password=***YourPass***") rstRecordset.Open "Select * from YourTable", cnnConnection, _ adOpenKeyset, adLockOptimistic End Sub Public Function LoadPictureFromDB(RS As ADODB.Recordset) On Error GoTo procNoPicture 'If Recordset is Empty, Then Exit If RS Is Nothing Then GoTo procNoPicture End If Set strStream = New ADODB.Stream strStream.Type = adTypeBinary strStream.Open strStream.Write RS.Fields("**YourImageField**").Value strStream.SaveToFile "C:\Temp.bmp", adSaveCreateOverWrite Image1.Picture = LoadPicture("C:\Temp.bmp") Kill ("C:\Temp.bmp") LoadPictureFromDB = True procExitFunction: Exit Function procNoPicture: LoadPictureFromDB = False GoTo procExitFunction End Function Public Function SavePictureToDB(RS As ADODB.Recordset, _ sFileName As String) On Error GoTo procNoPicture Dim oPict As StdPicture Set oPict = LoadPicture(sFileName) 'Exit Function if this is NOT a picture file If oPict Is Nothing Then MsgBox "Invalid Picture File!", vbOKOnly, "Oops!" SavePictureToDB = False GoTo procExitSub End If RS.AddNew Set strStream = New ADODB.Stream strStream.Type = adTypeBinary strStream.Open strStream.LoadFromFile sFileName RS.Fields("***YourImageField***").Value = strStream.Read Image1.Picture = LoadPicture(sFileName) SavePictureToDB = True procExitSub: Exit Function procNoPicture: SavePictureToDB = False GoTo procExitSub End Function 24.3 Pointer 3: =============== SET QUOTED_IDENTIFIER ON GO SET ANSI_NULLS ON GO CREATE PROCEDURE audit_dealer AS DECLARE @ptrfromA varbinary(16), @ptrtoA varbinary(16), @ptrfromB varbinary(16), @ptrtoB varbinary(16), @dealer_zip_id int INSERT INTO dealer_zip(create_date) VALUES(getdate()) SELECT @dealer_zip_id = MAX(dealer_zip_id) FROM dealer_zip INSERT INTO dealer_audit(dealer_zip_id, dealer_univ_nbr, boxa_upload_user, boxa_upload_date, boxa_approve_user, boxa_approve_date, boxb_upload_user, boxb_upload_date, boxb_approve_user, boxb_approve_date, boxb_text_id, boxb_image_id) SELECT @dealer_zip_id, dealer_imageset.dealer_univ_nbr, boxa_upload_user, boxa_upload_date, boxa_approve_user, boxa_approve_date, boxa_upload_user, boxb_upload_date, boxb_approve_user, boxb_approve_date, boxb_text_id, boxb_image_id FROM dealer_imageset, dealer WHERE dealer_imageset.dealer_univ_nbr = dealer.dealer_univ_nbr AND boxa_approved_flag = 'y' AND (boxb_approved_flag = 'y' or boxb_permission_flag <> 'y') AND dealer_imageset.dealer_univ_nbr in ( SELECT DISTINCT dealer_univ_nbr FROM dealer_info WHERE a_graphic IS NOT NULL OR b_graphic IS NOT NULL) DECLARE audit_cursor CURSOR FOR SELECT TEXTPTR(di.boxa_image) from_boxa, TEXTPTR(di.boxb_image) from_boxb, TEXTPTR(a.boxa_image) to_boxa, TEXTPTR(a.boxb_image) to_boxb FROM dealer_imageset di, dealer_audit a, dealer d WHERE a.dealer_zip_id = @dealer_zip_id AND d.dealer_univ_nbr = a.dealer_univ_nbr AND di.dealer_univ_nbr = a.dealer_univ_nbr AND (di.boxb_text = NULL OR di.boxb_text = '') AND di.boxa_approved_flag = 'y' AND 
(di.boxb_approved_flag = 'y' or d.boxb_permission_flag <> 'y') OPEN audit_cursor FETCH NEXT FROM audit_cursor INTO @ptrfromA, @ptrfromB, @ptrtoA, @ptrtoB WHILE @@FETCH_STATUS = 0 BEGIN IF @ptrfromA is not null BEGIN UPDATETEXT dealer_audit.boxa_image @ptrtoA 0 null dealer_imageset.boxa_image @ptrfromA END IF @ptrfromB is not null BEGIN UPDATETEXT dealer_audit.boxa_image @ptrtoB 0 null dealer_imageset.boxa_image @ptrfromB END FETCH NEXT FROM audit_cursor INTO @ptrfromA, @ptrfromB, @ptrtoA, @ptrtoB END CLOSE audit_cursor DEALLOCATE audit_cursor -- finally, delete all blobs that have been in the audit table for more than two months UPDATE dealer_audit SET boxa_image = null, boxb_image = null WHERE ISNULL(boxa_approve_date, getdate()) < getdate() - 60 AND ISNULL(boxb_approve_date, getdate()) < getdate() - 60 GO SET QUOTED_IDENTIFIER OFF GO SET ANSI_NULLS ON GO 24.4 Pointer 4: =============== Formatted text strings, such as Microsoft® Word™ document files or HTML files, cannot be stored in character string or Unicode columns because many of the bytes in these files contain data structures that do not form valid characters. Database applications may still have a need to access this data and apply full-text searches to it. Many sites store this type of data in image columns, because image columns do not require that each byte form a valid character. SQL Server 2000 introduces the ability to perform full-text searches against these types of data stored in image columns. SQL Server 2000 supplies filters that allow it to extract the textual data from Microsoft Office™ files (.doc, .xls, and .ppt files), text files (.txt files), and HTML files (.htm files). When you design the table, in addition to the image column that holds the data, you include a binding column to hold the file extension for the format of data stored in the image column. You can create a full-text index that references both the image column and the binding column to enable full-text searches on the textual information stored in the image column. The SQL Server 2000 full-text search engine uses the file extension information from the binding column to select the proper filter to extract the textual data from the column. 24.5 Pointer 5: =============== Const ForReading = 1, ForWriting = 2, ForAppending = 8 Dim buf, rs, cn, ConnectionString, Sql, JpegFileName Dim fso, f JpegFileName = "C:\FullPath\FileName.JPG" ConnectionString = "Provider=SQLOLEDB;Server=(local);" & _ "Database=MyPictures;Trusted_Connection=Yes;" set cn = Server.CreateObject("ADODB.Connection") set rs = Server.CreateObject("ADODB.Recordset") cn.Open ConnectionString Set fso = CreateObject("Scripting.FileSystemObject") Set f = fso.OpenTextFile(JpegFileName, ForReading, False) buf = f.ReadAll f.Close Sql = "SELECT ImageData FROM Pix" rs.open Sql, cn, adOpenKeyset, adLockOptimistic rs.AddNew rs(0).AppendChunk buf rs.Update rs.Close Set rs.ActiveConnection = Nothing Set rs = Nothing cn.Close Set cn = Nothing 24.6 Pointer 6: =============== Maximum bytes per row is 8060. Binary data like images has a 16 byte pointer on the page & the data is stored on separate pages [small amounts of binary data <8K can be stored on the page in SQL 2K] To store/retrieve this sort of data within TSQL scripts you have to use the WRITETEXT and READTEXT commands rather than standard INSERT/SELECT statements. These are documented, with examples, in the books-online but are basically a real pain to use. 
There are more manageable commands available from within the relevant programming languages - e.g. RDO and ADO from VB/C can use the GetChunk and AppendChunk commands - but you still have to manage the image/text data a chunk/block at a time. About the only upside of storing this sort of data within SQL Server is that it can be kept transactionally consistent with the other data. For sample code see Q194975 - "Sample Functions Demonstrating GetChunk and AppendChunk".

Private Sub Upload()
Dim rs As ADODB.Recordset
Dim stm As ADODB.Stream
Dim SQL As String

'instantiating the objects
Set rs = New ADODB.Recordset
Set stm = New ADODB.Stream

'getting the image from the file
With stm
    stm.Type = adTypeBinary
    stm.Open
    stm.LoadFromFile PICName 'This file is passed from the user control's click event.
End With

'establishing the SQL statement
SQL = "SELECT Pic, pic_location FROM Picture"

'storing the file into the database
With rs
    .CursorType = adOpenKeyset
    .LockType = adLockOptimistic
    .Open SQL, DatabaseConnection(ServerName, DatabaseName, UserId, Password)
    .AddNew
    .Fields("pic") = stm.Read
    .Fields("Pic_Location") = PICName
    .Update
    .Close
End With

'prompting the user
MsgBox "Image File : " & PICName & " has been successfully uploaded to the database.", vbInformation

'clean up
Set rs.ActiveConnection = Nothing
stm.Close
Set rs = Nothing
Set stm = Nothing
End Sub

24.7 Pointer 7:
===============

I am having trouble with creating a format file to bcp in a jpeg. The table has one column which has image as the datatype. When I use the following bcp command and manually provide the listed responses, the jpeg goes in fine. When I then try to use the format file that the system generates, nothing happens, and there are no error messages at all. Here is the bcp command and the responses to the four prompts.

bcp ecsm..image_test in manchesterbw.jpg -T -SDPDDDDS001
Enter the file storage type of field picture [image]: I
Enter prefix-length of field picture [4]: 0
Enter length of field picture [0]: 54026
Enter field terminator [none]:

This is the format file that is generated by the system:

6.0
1
1 SQLBINARY 0 54026 "" 1 picture

This is the bcp statement I am using to utilize the format file:

bcp ecsm..image_test in manchesterbw.jpg -f russell.fmt -T -SDPDDDDS001

-- I think the format file should be as follows:

6.0
1
1 SQLIMAGE 0 54026 "" 1 data

-- I changed the SQLBINARY to SQLIMAGE. All is now working, thanks for the tip. --

24.8 Pointer 8:
===============

Text, ntext, and image data have been around a long time, but their nuances can be easy to overlook. This tutorial provides a quick overview of the implementation and usage of these special data types. Databases are growing in size and complexity, in part because today's hardware and software allow us to store mind-boggling amounts of data, including multimedia and document data. JPG, PNG, MP3, DOC/RTF, HTML, Unicode, and XML data can all be stored as image, text, or ntext in SQL Server databases. Generally speaking, you use text to store huge ASCII character strings, ntext for Unicode character strings, and image for binary image data. Worried about size? Text gives you up to 2^31 - 1 (2,147,483,647) variable-length non-Unicode characters, ntext up to 2^30 - 1 (1,073,741,823) characters, and image up to 2^31 - 1 (2,147,483,647) bytes. The actual storage size, in bytes, for ntext is two times the number of characters entered. The SQL-92 synonym for ntext is national text. So how do they work? They use pointers to reference the data.
Special functions allow the pointer to add, extract, or remove data from them. Text and image functions Here I'll describe a number of text and image functions, showing the syntax and an example with output for each. TEXTPTR TEXTPTR returns a varbinary of length 16 bytes, which is the text-pointer value that references a text, ntext, or image column. Syntax: TEXTPTR ( column ) --TEXTPTR sample, create a text-pointer, see its value create table #t (n ntext) insert #t values('abcdef') DECLARE @ptrval binary(16) SELECT @ptrval = TEXTPTR(n) FROM #t print @ptrval drop table #t Output: 0xFFFF6900000000004D00000001000000 TEXTVALID TEXTVALID returns an int, which will be of value 1 if the text-pointer is valid, 0 otherwise. Syntax: TEXTVALID ( 'table.column' , text_ptr ) --TEXTVALID sample, creates a text-pointer, tests it create table #t (n ntext) insert #t values('abxyef') DECLARE @ptrval binary(16), @ptrval2 binary(16) SELECT @ptrval = TEXTPTR(n) FROM #t if TEXTVALID('#t.n',@ptrval)=1 print '@ptrval has a valid text pointer.' else print '@ptrval has an invalid text pointer.' if TEXTVALID('#t.n',@ptrval2)=1 print '@ptrval2 has a valid text pointer.' else print '@ptrval2 has an invalid text pointer.' drop table #t Output: @ptrval has a valid text pointer. @ptrval2 has an invalid text pointer. SET TEXTSIZE SET TEXTSIZE sets the size, an int value, of text and ntext data to be returned when using a SELECT statement. Syntax: SET TEXTSIZE { number } --SET TEXTSIZE sample create table #t (n ntext) insert #t values('abcdefghijk') SET TEXTSIZE 10--ntext is unicode, 2 bytes/character select * from #t SET TEXTSIZE 20--ntext is unicode, 2 bytes/character select * from #t drop table #t Output: abcde abcdefghij @@TEXTSIZE @@TEXTSIZE returns the size, an int value, of text and ntext data to be returned when using a SELECT statement. This value is set with SET TEXTSIZE. Syntax: @@TEXTSIZE --@@TEXTSIZE sample SET TEXTSIZE 10--ntext is unicode, 2 bytes/character print @@TEXTSIZE SET TEXTSIZE 20--ntext is unicode, 2 bytes/character print @@TEXTSIZE Output: 10 20 WRITETEXT WRITETEXT overwrites the data from a text, ntext, or image column. Syntax: WRITETEXT { table.column text_ptr } [ WITH LOG ] { data } --WRITETEXT sample create table #t (n ntext) insert #t values('abc') DECLARE @ptrval binary(16) SELECT @ptrval = TEXTPTR(n) FROM #t WRITETEXT #t.n @ptrval 'def' select * from #t drop table #t Output: def UPDATETEXT UPDATETEXT changes the data from an existing text, ntext, or image column. 
Syntax: UPDATETEXT { table_name.dest_column_name dest_text_ptr } { NULL | insert_offset } { NULL | delete_length } [ WITH LOG ] [ inserted_data | { table_name.src_column_name src_text_ptr } ] --UPDATETEXT sample insertion only create table #t (n ntext) insert #t values('bd') DECLARE @ptrval binary(16), @i int SELECT @ptrval = TEXTPTR(n) FROM #t UPDATETEXT #t.n @ptrval 0 0 'a'--insert at beginning select * from #t UPDATETEXT #t.n @ptrval 2 0 'c'--insert in the middle select * from #t set @i=(select DATALENGTH(n) from #t)/2 --/2 only if ntext, 2 bytes/character print @i UPDATETEXT #t.n @ptrval @i 0 'e'--insert at the end select * from #t drop table #t Output: abd abcd abcde Sample deletion and insertion: --UPDATETEXT sample deletion+insertion create table #t (n ntext) insert #t values('abxyef') DECLARE @ptrval binary(16), @i int SELECT @ptrval = TEXTPTR(n) FROM #t UPDATETEXT #t.n @ptrval 2 2 'cd'--insert 2, delete 2 --chars starting at position 2 select * from #t drop table #t Output: abcdef READTEXT READTEXT reads a certain amount of data from a text, ntext, or image column. Syntax: READTEXT { table.column text_ptr offset size } [ HOLDLOCK ] --READTEXT sample create table #t (n ntext) insert #t values('abcdefghijk') DECLARE @ptrval binary(16) SELECT @ptrval = TEXTPTR(n) FROM #t READTEXT #t.n @ptrval 3 8 --read 8 characters starting at position 3 drop table #t Output: defghijk DATALENGTH DATALENGTH returns the size (number of bytes) of a text, ntext, or image column. Syntax: DATALENGTH ( expression ) --DATALENGTH sample create table #t (n ntext) insert #t values('1234567890') DECLARE @i int set @i=(select DATALENGTH(n) from #t) --it should return the length in bytes=2*UNICODE length PRINT @i drop table #t Output: 20 PATINDEX PATINDEX returns the location, an int value, of the first occurrence of a pattern in a text, ntext, or image column, or 0 if the pattern wasn't found. Syntax: PATINDEX ( '%pattern%' , expression ) --PATINDEX sample create table #t (n ntext) insert #t values('Hello Tim, long time no see!') SELECT PATINDEX('%tim%', n) FROM #t SELECT PATINDEX('%time%', n) FROM #t drop table #t Output: 7 17 CONVERT CONVERT returns an expression converted from one data type to another. Syntax: CONVERT ( data_type [ ( length ) ] , expression [ , style ] ) --CONVERT sample create table #t (n ntext) insert #t values('Hello Tim, long time no see!') DECLARE @c nvarchar(5) SET @c=(select convert(nvarchar(5),n) from #t) print @c drop table #t Output: Hello CAST CAST returns an expression casted (converted) from one data type to another. Syntax: CAST ( expression AS data_type ) --CAST sample create table #t (n ntext) insert #t values('Hello Tim, long time no see!') DECLARE @c nvarchar(5) SET @c=(select CAST ( n AS nvarchar(5) ) from #t) print @c drop table #t Output: Hello Saving a text, ntext, or image column to a file I created three separate stored procs that show you how you can save one column of type text, ntext, or image to a file. You'll find the code for these and all other examples in the accompanying Download file. 
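The Download file itself is not included in this document. As a rough idea of how such a proc could look (an assumed sketch, not the author's actual code), one approach is to shell out to bcp with a queryout statement; because bcp runs in its own connection, the table must be visible to that connection (a permanent table or a ##global temp table, as in the samples below):

-- hypothetical sketch of saveText2file, using bcp queryout via xp_cmdshell
CREATE PROCEDURE saveText2file
  @filename    VARCHAR(255),
  @tablename   VARCHAR(128),
  @colname     VARCHAR(128),
  @whereclause VARCHAR(255)
AS
BEGIN
  DECLARE @cmd VARCHAR(1000)
  -- -T = trusted connection; use -Uusername -Ppassword for SQL authentication
  SET @cmd = 'bcp "SELECT ' + @colname + ' FROM ' + @tablename + ' ' + @whereclause
           + '" queryout "' + @filename + '" -c -T'
  EXEC master.dbo.xp_cmdshell @cmd
END
GO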
--saveText2file sample create table ##t (n text) insert ##t values('Hello Tim, long time no see!') EXEC saveText2file 'c:\test.txt', '##t','n', '' drop table ##t --saveNtext2file sample create table ##t (n ntext) insert ##t values('Hello Tim, long time no see!') EXEC saveNtext2file 'c:\test.txt', '##t','n', '' drop table ##t --saveImage2file sample exec saveImage2file 'c:\Category1.bak', 'Northwind..Categories', 'Picture', 'where categoryid=1' Updating a text, ntext, or image column from a file Because TEXTPTR, WRITETEXT, and UPDATETEXT don't allow variable names to define the table or column parameters, reading the contents of a file into a column requires you to use dynamic SQL. Stored proc readImageFromfile can handle both image and varchar data types because it reads the data as binary and writes it without using temporary tables. Ntext can be read using readNtextFromfile. --readImageFromfile sample --reading a text column from a file create table ##t (n text) insert ##t values('Hi Tim, long time no see!') EXEC readImageFromfile 'c:\hello.txt', '##t','n', '' select * from ##t drop table ##t Output: Hello 24.9 Pointer 9: =============== Save Data in a SQL Server Image Column with VB6 I needed to retrieve image fields on SQL Server 7.0 with VB6, and I couldn't find any article about it. So, I assume others have had the same problem. I've since found a method for doing it. You must use Microsoft ADO 2.5 and set it into the following project reference: dim rst as new adodb.recordset dim adoConn as new adodb.Connection You also have to open the connection with the database. 'Open recordset.... rst.Open "Select * from where ", adoConn, adOpenKeyset, adLockOptimistic 'THIS FUNCTION SAVES AN IMAGE INTO AN IMAGE DATATYPE FIELD Private Function SaveImage() Dim mStream As New ADODB.Stream With mStream .Type = adTypeBinary .Open .LoadFromFile "" rst(""). Value = .Read rst.Update End With Set mStream = Nothing End Function 'THIS FUNCTION LOAD IMAGE FROM IMAGE DATATYPE FIELD AND SAVE IT INTO A FILE..... Private Function LoadImage() Dim mStream As New ADODB.Stream With mStream .Type = adTypeBinary .Open .Write rst("") .SaveToFile "", adSaveCreateOverWrite End With Set mStream = Nothing End Function Aside from this method, you can use a picture control to store an image, put a picture control into a form, and call it PictureTemp. PictureTemp.DataField = "Immagine" 'Set DataField.... Set PictureTemp.DataSource = rst 'Set DataSource You can use the PictureTemp.Picture property to get your image. Private Function LoadImage() Dim mStream As New ADODB.Stream With mStream .Type = adTypeBinary .Open PictureTemp.DataField = "Immagine" 'Set DataField.... Set PictureTemp.DataSource = rst 'Set DataSource Set MSFGRID.CellPicture = PictureTemp.Picture 'Show image into a cell of Microsoft FlexGrid End With Set mStream = Nothing End Function 24.10 Pointer 10: ================= An application that reads and writes Word docs to and from SQL server. 
Dim rstRecordset As ADODB.Recordset Dim cnnConnection As ADODB.Connection Dim strStream As ADODB.Stream Dim imgname As String Private Sub cmdLoad_Click() Set cnnConnection = New ADODB.Connection Set rstRecordset = New ADODB.Recordset imgname = GiveId.Text cnnConnection.Open ("Provider=SQLOLEDB; " & _ "data Source=xpora;" & _ "Initial Catalog=pubs; " & _ "User Id=karel;Password=karel") rstRecordset.Open "Select * from docs where id=" & imgname, cnnConnection, _ adOpenKeyset, adLockOptimistic Set strStream = New ADODB.Stream strStream.Type = adTypeBinary strStream.Open strStream.Write rstRecordset.Fields("Doc").Value strStream.SaveToFile "C:\Temp.doc", adSaveCreateOverWrite Shell "E:\Program Files\Microsoft Office\Office\Winword.exe " & _ Chr$(34) & "C:\temp.doc", 1 End Sub 'Or as an alternative to the Shell command: Private Sub cmdLoad2_Click() Set cnnConnection = New ADODB.Connection Set rstRecordset = New ADODB.Recordset imgname = GiveId.Text cnnConnection.Open ("Provider=SQLOLEDB; " & _ "data Source=xpora;" & _ "Initial Catalog=pubs; " & _ "User Id=karel;Password=karel") rstRecordset.Open "Select * from docs where id=" & imgname, cnnConnection, _ adOpenKeyset, adLockOptimistic Set strStream = New ADODB.Stream strStream.Type = adTypeBinary strStream.Open strStream.Write rstRecordset.Fields("Doc").Value strStream.SaveToFile "C:\Temp.doc", adSaveCreateOverWrite Dim wsApp As Word.Application 'Set wsApp = GetObject(, "Word.Application") Set wsApp = CreateObject("Word.Application") wsApp.Visible = True wsApp.Documents.Open ("c:\temp.doc") End Sub Private Sub cmdQuit_Click() End End Sub Private Sub cmdSelectSave_Click() 'Shell "C:\Program Files\Microsoft SQL Server\mssql\binn\textcopy.exe " & _ '"-I -S xpora -D pubs -T docs -C doc -U karel -P karel -W where id=" & imgname & " -F c:\temp.doc" Set cnnConnection = New ADODB.Connection Set rstRecordset = New ADODB.Recordset imgname = GiveId.Text cnnConnection.Open ("Provider=SQLOLEDB; " & _ "data Source=xpora;" & _ "Initial Catalog=pubs; " & _ "User Id=karel;Password=karel") rstRecordset.Open "Select * from docs where id=" & imgname, cnnConnection, _ adOpenKeyset, adLockOptimistic Set mstream = New ADODB.Stream mstream.Type = adTypeBinary mstream.Open mstream.LoadFromFile "c:\temp.doc" rstRecordset.Fields("doc").Value = mstream.Read rstRecordset.Update rstRecordset.Close cnnConnection.Close End Sub ---------------------------------------------------------------------- 25. Named pipes, Sockets, and Multiprotocol: ---------------------------------------------------------------------- 25.1 TCP/IP Sockets: -------------------- Suppose the Server 10.10.10.1 has multiple Server programs running. How does a client differentiate between the multiple Server programs? The usual way with tcpip is the use of sockets. A socket is an "identifier" completely identifying the location of a Server on the network, as well as the "port" the server service is listening on, like for example: 10.10.10.1 : 1521 or for example 10.10.10.1 : 1433 The client should have knowledge of the "port" of the desired Host program or the host service is listening on. For example it could come from a local services file, or a registry. The client constructs a tcp header, while in the destination port, the port is listed where the Host Server service or deamon is listening on. 
Server, IP=10.10.10.1
------------------------------------------------------------------

   ----------------------          ----------------------
   | Oracle listener    |          | SQL Server listener|
   | listening on       |          | listening on       |
   | port 1521          |          | port 1433          |
   ----------------------          ----------------------
             ^                               ^
             |                               |
   -------------------------------------------------
   | Portmapper / Netlib router, handling requests  |
   | to the desired host program                    |
   -------------------------------------------------
             ^                               ^
             |                               |
------------------------------------------------------------------
   client request for                client request for
   connection to Oracle              connection to SQL Server
   10.10.10.1:1521                   10.10.10.1:1433


25.2 Named pipes:
-----------------
A high level process, like a client program, can open and write to a "special file",
the "named pipe". The named pipe can be considered to be at OSI layer 7, and is an
IPC mechanism for process-to-process communication, locally or across a network.

In Windows, the design of named pipes is biased towards client-server communication,
and they work much like sockets: besides the usual read and write operations,
Windows named pipes also support an explicit "passive" mode for server applications
(compare: UNIX domain sockets). Named pipes aren't permanent and can't be created as
special files on any writable filesystem, unlike in UNIX, but are volatile names
(freed after the last reference to them is closed) allocated in the root directory
of the named pipe filesystem (NPFS), mounted under the special path \\.\pipe\
(that is, a pipe named "foo" would have the full path name \\.\pipe\foo).
Anonymous pipes used in pipelining are actually named pipes with a random name.

When constructing the client program (VB, C++, VB.NET, C# etc.) there is some sort of
mechanism to create a named pipe, for example:

Public Declare Function CallNamedPipe Lib "kernel32" Alias "CallNamedPipeA" _
(ByVal lpNamedPipeName As String, etc......

The pipe is an IPC construct above any network protocol such as sockets/tcp/ip,
or nwlink spx/ipx etc. It uses the IPC$ share of the remote system, just like a
file system share:

\\computername\pipe\MSSQL$instancename\sql\query

CLIENT:
----------------------------------------
named pipe \\.\sql\query,                    rw to and from pipe
functions like a sort of URL or share    <-------------------------> Server named pipe
----------------------------------------
session management, sockets, netbios
----------------------------------------
TCP                     SPX
----------------------------------------
IP                      IPX
----------------------------------------
Datalink
----------------------------------------
physical network
----------------------------------------

25.3 Multiprotocol:
-------------------
It is a protocol that layers over named pipes, tcpip sockets, or nwlink spx/ipx sockets.
So you must have one of the above IPC mechanisms available.

The Multiprotocol selection has two key features:

Automatic selection of an available network protocol to communicate with an instance of
Microsoft® SQL Server™. This is convenient when you want to connect to multiple servers
running different network protocols but do not want to reconfigure the client connection
for each server. If the client and server Net-Libraries for TCP/IP Sockets, NWLink IPX/SPX,
or Named Pipes are installed on the client and server, the Multiprotocol Net-Library will
automatically choose the first available network protocol to establish a connection.

Client encryption.
You can enforce encryption over the Multiprotocol Net-Library on clients running on the
Microsoft Windows NT® 4.0, Windows® 2000, Windows 95, or Windows 98 operating system to
prevent others from intercepting and viewing sensitive data. The Multiprotocol Net-Library
takes advantage of the remote procedure call (RPC) facility of Windows NT 4.0 and
Windows 2000, which provides Windows Authentication.

For the Multiprotocol Net-Library, clients determine the server address using the server name.

Usage Considerations
Before using the Multiprotocol Net-Library, consider the following:

The Multiprotocol Net-Library does not support named instances of SQL Server 2000.
You can use the Multiprotocol Net-Library to connect to the default instance of SQL Server
on a computer, but you cannot connect to any named instances.

The Multiprotocol Net-Library does not support server enumeration. From applications that
can list servers by calling dbserverenum, you cannot identify servers running an instance
of SQL Server and listening on the Multiprotocol Net-Library.

----------------------------------------------------------------------
26. (Traditional) Client connections to SQL Server:
----------------------------------------------------------------------

-------    -------    -------    -------
|App 1|    |App 2|    |App 3|    |App 4|
-------    -------    -------    -------
   |          |          |          |
-------    -------       |          |
|ADO  |    |RDO  |       |          |
-------    -------       |          |
   |          |          |          |
-----------------    ---------------
|OLE DB         |    |ODBC         |        (TabularDataStream TDS)
-----------------    ---------------
        |                   |
-----------------------------------
|Client Network library api       |
|- named pipes                    |
|- tcpip sockets                  |
|- multiprotocol                  |
-----------------------------------
                 |
              network        tcp/ip, spx/ipx etc..
----------------------------------------------------------
                 |
-----------------------------------
|SQL Server network library       |
-----------------------------------
                 |
-----------------
|SQL Server     |        (TDS)
-----------------

----------------------------------------------------------------------
27. Example of a complete program, implemented as a SP:
----------------------------------------------------------------------

CREATE PROCEDURE stp_Mig_Sol_Revised
AS
/**********************************************************************/
/* PURPOSE: MIGRATION OF SOLID TO SQLServer. Revised Procedure. */
/* STATUS : READY FOR PRODUCTION. */
/* VERSION: 1.0 */
/* */
/* DATE   : 21-04-2004 */
/* AUTHOR : AvdS */
/* -------------------------------------------------- */
/* */
/* COMPLETELY REVISED PROCEDURE. NOW WE USE DTS TO LOAD DIRECTLY */
/* FROM SOLID TABLES TO THE MSSQL TABLES, INSTEAD OF OUTPUTTING FROM */
/* SOLID WITH SOLEXP TO TXTFILES, THEN USING bcp FOR IMPORT, SCANNING */
/* FIELDS ON UNWANTED CHARACTERS, AND ENDLESS SCRUBBING AND */
/* TRANSFORMING FIELDS. */
/* */
/* THE BUSINESS LOGIC IS STILL THE SAME. */
/* */
/* You may name this procedure anything you like. */
/* */
/**********************************************************************/

-- BEFORE RUNNING THIS PROCEDURE CONSIDER THE FOLLOWING:
-- -----------------------------------------------------
-- 1. THE DTS PACKAGES (9 PACKAGES) SHOULD BE LOCATED IN C:\EXP_SOLID.
--    IF THE PACKAGES ARE LOCATED ELSEWHERE, YOU MUST FIND/REPLACE THE PATH C:\EXP_SOLID IN THIS PROCEDURE WITH THE CORRECT PATH.
-- 2. THE STANDARD MSSQL TOOL "DTSRUN.EXE" EXECUTABLE MUST BE PRESENT ON THE TARGET SYSTEM.
-- 3. THE SOLID SERVER 2.x SHOULD BE UP AND RUNNING.
-- 4. THE SOLID ODBC DRIVER SHOULD BE PRESENT ON THE TARGET SYSTEM.
SET ANSI_WARNINGS OFF SET NOCOUNT ON /*********************************************************/ /*STEP 1. VARIABLE DECLARATIONS. */ /*********************************************************/ -- VARIABLES FOR PROCESSING DECLARE @TEMPTAB VARCHAR(256) DECLARE @TEMPTAB2 VARCHAR(256) DECLARE @length_table VARCHAR(256) DECLARE @FK VARCHAR(128) DECLARE @REFERENCED VARCHAR(128) DECLARE @ART_NR VARCHAR(128) DECLARE @EENH_NM VARCHAR(128) DECLARE @COUNT_ARTICLES INT -- VARIABLES FOR ERROR AND REPORTING: DECLARE @ERR_MESSAGE VARCHAR(256) DECLARE @LOGSTRING VARCHAR(256) DECLARE @LOGFILE VARCHAR(256) DECLARE @DTS_PRESENT INT DECLARE @ODBC_PRESENT INT DECLARE @_NG39AFGP_B INT DECLARE @_NG39LEV_B INT DECLARE @_NG39ART_B INT DECLARE @_NG39TOES_B INT DECLARE @_NG39BUDG_B INT DECLARE @_NG39MLT_B INT DECLARE @_NG39USER_B INT DECLARE @_NG39EENH_B INT DECLARE @_NG39SYST_B INT DECLARE @_NG39AFGP_A INT DECLARE @_NG39LEV_A INT DECLARE @_NG39ART_A INT DECLARE @_NG39TOES_A INT DECLARE @_NG39BUDG_A INT DECLARE @_NG39MLT_A INT DECLARE @_NG39USER_A INT DECLARE @_NG39EENH_A INT DECLARE @_NG39SYST_A INT DECLARE @DIFF_NG39AFGP INT DECLARE @DIFF_NG39LEV INT DECLARE @DIFF_NG39ART INT DECLARE @DIFF_NG39TOES INT DECLARE @DIFF_NG39BUDG INT DECLARE @DIFF_NG39MLT INT DECLARE @DIFF_NG39USER INT DECLARE @DIFF_NG39EENH INT DECLARE @DIFF_NG39SYST INT /*********************************************************/ /*STEP 2. SIMPLE CHECKs ON SOME PRELIMININARIES. */ /*********************************************************/ -- IF YOU DO NOT LIKE STEP 2, YOU CAN WIPE IT OUT ENTIRELY. -- CHECK ON C:\EXP_SOLID -- --------------------- EXEC @DTS_PRESENT = master.dbo.xp_cmdshell 'dir c:\exp_solid\*.dts' IF (@DTS_PRESENT <> 0) BEGIN SET @ERR_MESSAGE='DTS packages not found in c:\exp_solid. Procedure aborted.' GOTO error_section END -- CHECK ON SOLID 2.x ODBC DRIVER -- ------------------------------ EXEC @ODBC_PRESENT = master.dbo.xp_cmdshell 'dir %SYSTEMROOT%\system32\sosw*.*' IF (@ODBC_PRESENT <> 0) BEGIN SET @ERR_MESSAGE='SOLID ODBC DRIVER PROBABLY NOT INSTALLED. Procedure aborted.' GOTO error_section END /************************************************************/ /*STEP 3. REMOVE POSSIBLY EXISTING STAGING TEMP TABLES. */ /************************************************************/ DECLARE cur1 CURSOR FOR SELECT name FROM sysobjects WHERE name like 'ST%' AND xtype='U' OPEN cur1 FETCH NEXT FROM cur1 INTO @TEMPTAB WHILE (@@FETCH_STATUS<>-1) BEGIN exec ('DROP TABLE '+@TEMPTAB) FETCH NEXT FROM cur1 INTO @TEMPTAB END CLOSE cur1 DEALLOCATE cur1 IF @@error > 0 BEGIN SET @ERR_MESSAGE='Error: error dropping temporary staging tables.' GOTO error_section END /************************************************************/ /*STEP 4. DROP FOREIGN KEY CONSTRAINTS. 
*/ /************************************************************/ if exists (select * from dbo.sysobjects where id = object_id(N'[dbo].[FK_NG39AFGR_NG39AFG]') and OBJECTPROPERTY(id, N'IsForeignKey') = 1) ALTER TABLE [dbo].[NG39AFGR] DROP CONSTRAINT FK_NG39AFGR_NG39AFG if exists (select * from dbo.sysobjects where id = object_id(N'[dbo].[FK_NG39AFG_NG39AFGP]') and OBJECTPROPERTY(id, N'IsForeignKey') = 1) ALTER TABLE [dbo].[NG39AFG] DROP CONSTRAINT FK_NG39AFG_NG39AFGP if exists (select * from dbo.sysobjects where id = object_id(N'[dbo].[FK_NG39AFGR_NG39ART]') and OBJECTPROPERTY(id, N'IsForeignKey') = 1) ALTER TABLE [dbo].[NG39AFGR] DROP CONSTRAINT FK_NG39AFGR_NG39ART if exists (select * from dbo.sysobjects where id = object_id(N'[dbo].[FK_NG39ONTR_NG39ART]') and OBJECTPROPERTY(id, N'IsForeignKey') = 1) ALTER TABLE [dbo].[NG39ONTR] DROP CONSTRAINT FK_NG39ONTR_NG39ART if exists (select * from dbo.sysobjects where id = object_id(N'[dbo].[FK_NG39DGLB_NG39BUDG]') and OBJECTPROPERTY(id, N'IsForeignKey') = 1) ALTER TABLE [dbo].[NG39DGLB] DROP CONSTRAINT FK_NG39DGLB_NG39BUDG if exists (select * from dbo.sysobjects where id = object_id(N'[dbo].[FK_NG39AFG_NG39DGLB]') and OBJECTPROPERTY(id, N'IsForeignKey') = 1) ALTER TABLE [dbo].[NG39AFG] DROP CONSTRAINT FK_NG39AFG_NG39DGLB if exists (select * from dbo.sysobjects where id = object_id(N'[dbo].[FK_NG39DSTK_NG39DGLB]') and OBJECTPROPERTY(id, N'IsForeignKey') = 1) ALTER TABLE [dbo].[NG39DSTK] DROP CONSTRAINT FK_NG39DSTK_NG39DGLB if exists (select * from dbo.sysobjects where id = object_id(N'[dbo].[FK_NG39DTOE_NG39DGLB]') and OBJECTPROPERTY(id, N'IsForeignKey') = 1) ALTER TABLE [dbo].[NG39DTOE] DROP CONSTRAINT FK_NG39DTOE_NG39DGLB if exists (select * from dbo.sysobjects where id = object_id(N'[dbo].[FK_NG39SCHF_NG39DGLB]') and OBJECTPROPERTY(id, N'IsForeignKey') = 1) ALTER TABLE [dbo].[NG39SCHF] DROP CONSTRAINT FK_NG39SCHF_NG39DGLB if exists (select * from dbo.sysobjects where id = object_id(N'[dbo].[FK_NG39SCHF_NG39DSTK]') and OBJECTPROPERTY(id, N'IsForeignKey') = 1) ALTER TABLE [dbo].[NG39SCHF] DROP CONSTRAINT FK_NG39SCHF_NG39DSTK if exists (select * from dbo.sysobjects where id = object_id(N'[dbo].[FK_NG39ONT_NG39LEV]') and OBJECTPROPERTY(id, N'IsForeignKey') = 1) ALTER TABLE [dbo].[NG39ONT] DROP CONSTRAINT FK_NG39ONT_NG39LEV if exists (select * from dbo.sysobjects where id = object_id(N'[dbo].[FK_NG39DSTK_NG39MLT]') and OBJECTPROPERTY(id, N'IsForeignKey') = 1) ALTER TABLE [dbo].[NG39DSTK] DROP CONSTRAINT FK_NG39DSTK_NG39MLT if exists (select * from dbo.sysobjects where id = object_id(N'[dbo].[FK_NG39ONTR_NG39ONT]') and OBJECTPROPERTY(id, N'IsForeignKey') = 1) ALTER TABLE [dbo].[NG39ONTR] DROP CONSTRAINT FK_NG39ONTR_NG39ONT if exists (select * from dbo.sysobjects where id = object_id(N'[dbo].[FK_NG39DTOE_NG39TOES]') and OBJECTPROPERTY(id, N'IsForeignKey') = 1) ALTER TABLE [dbo].[NG39DTOE] DROP CONSTRAINT FK_NG39DTOE_NG39TOES /************************************************************/ /*STEP 5. CREATE STAGING TABLES. */ /************************************************************/ -- WE DO NOT USE #tablename or ##tablename TEMPOPARY TABLES, BUT -- TRUE DATABASE TABLES. IN THIS CASE, COLLATION CONFLICTS ARE IMPOSSIBLE. -- WE DO THIS TO AVOID ANY PROBLEM. -- AFTERWARDS, ALL TEMPOPARY TABLES ARE DROPPED. 
-- TEMP STAGING TABLES: -- -------------------- CREATE TABLE [dbo].[STNG39AFGP] ( [AFG_PNT_NR] [int] NOT NULL , [U_VERSION] [nvarchar] (1) NULL , [AFG_PNT_OMS] [nvarchar] (40) NULL , [AFG_PNT_VAST] [char] (1) NULL -- will be converted to bit ) ON [PRIMARY] IF @@error > 0 BEGIN SET @ERR_MESSAGE='Error in creating temp table STNG39AFGP. Procedure aborted.' GOTO error_section END CREATE TABLE [dbo].[STNG39EENH] ( [EENH_NM] [nvarchar] (92) NOT NULL , [U_VERSION] [nvarchar] (1) NULL , [VLG_OFF_NM] [nvarchar] (92) NULL , [VLG_OFF_RNG] [nvarchar] (15) NULL , [EENH_CMD_NM] [nvarchar] (92) NULL , [EENH_CMD_RNG] [nvarchar] (15) NULL , [EENH_HLD_NM] [nvarchar] (92) NULL , [EENH_HLD_RNG] [nvarchar] (15) NULL , [OFF_GNKD_NM] [nvarchar] (92) NULL , [OFF_GNKD_RNG] [nvarchar] (15) NULL , [EENH_PRG_VRL] [float] NULL , [EENH_BTW_H] [float] NOT NULL , [EENH_BTW_L] [float] NOT NULL , [EENH_VERW_DT] [smalldatetime] NULL , [EENH_STER_DT] [smalldatetime] NULL ) ON [PRIMARY] IF @@error > 0 BEGIN SET @ERR_MESSAGE='Error in creating temp table STNG39EENH. Procedure aborted.' GOTO error_section END CREATE TABLE [dbo].[STNG39LEV] ( [LEV_NR] [int] NOT NULL , -- IN BBV WE HAVE THE IDENTITY (1,1) PROPERTY BOUND TO THIS FIELD [U_VERSION] [nvarchar] (1) NULL , [LEV_NM] [nvarchar] (30) NOT NULL , [LEV_ADR] [nvarchar] (30) NULL , [LEV_PC] [nvarchar] (6) NULL , [LEV_PLTS] [nvarchar] (42) NULL , [LEV_REGMAG] [char](1) NOT NULL -- will be converted to bit -- [LEV_BTLND] [bit] NOT NULL -- COLUMN NOT PRESENT IN SOLID ) ON [PRIMARY] IF @@error > 0 BEGIN SET @ERR_MESSAGE='Error in creating temp table STNG39LEV. Procedure aborted.' GOTO error_section END CREATE TABLE [dbo].[STNG39MLT] ( [MLT_NR] [int] NOT NULL , [U_VERSION] [nvarchar] (1) NULL , [MLT_NM] [nvarchar] (30) NOT NULL , [MLT_PR] [money] NOT NULL ) ON [PRIMARY] IF @@error > 0 BEGIN SET @ERR_MESSAGE='Error in creating temp table STNG39MLT. Procedure aborted.' GOTO error_section END CREATE TABLE [dbo].[STNG39TOES] ( [TOES_NR] [int] NOT NULL , [U_VERSION] [nvarchar] (1) NULL , [TOES_BDR] [money] NOT NULL -- [TOES_OMS] [varchar] (500) NULL -- COLUMN IN SOLID NOT PRESENT -- [TOES_URL] [varchar] (100) NULL -- COLUMN IN SOLID NOT PRESENT ) ON [PRIMARY] IF @@error > 0 BEGIN SET @ERR_MESSAGE='Error in creating temp table STNG39TOES. Procedure aborted.' GOTO error_section END CREATE TABLE [dbo].[STNG39USER] ( [USR_ID] [nvarchar] (12) NOT NULL , [USR_GROEP] [nvarchar] (6) NULL , [USR_NAAM] [nvarchar] (60) NULL , [USR_PASSWORD] [nvarchar] (32) NOT NULL , [USR_PASS_DT] [smalldatetime] NULL , [USR_PASS_PER] [int] NOT NULL ) ON [PRIMARY] IF @@error > 0 BEGIN SET @ERR_MESSAGE='Error in creating temp table STNG39USER. Procedure aborted.' GOTO error_section END CREATE TABLE [dbo].[STNG39ART] ( [ART_NR] [nvarchar] (7) NOT NULL , [U_VERSION] [nvarchar] (1) NULL , [ART_OMS] [nvarchar] (55) NULL , [ART_VRP_EENH] [nvarchar] (3) NOT NULL , [ART_AANT] [decimal](8, 2) NOT NULL , [ART_PR] [money] NOT NULL , [ART_BTW] [nvarchar] (1) NULL , [ART_STATUS] [nvarchar] (1) NULL ) ON [PRIMARY] IF @@error > 0 BEGIN SET @ERR_MESSAGE='Error in creating temp table STNG39ART. Procedure aborted.' GOTO error_section END CREATE TABLE [dbo].[STNG39BUDG] ( [BUD_PER_NR] [int] NOT NULL , [U_VERSION] [nvarchar] (1) NULL , [BUD_BEG_SAL] [money] NULL , [BUD_END_SAL] [money] NULL , [BUD_ASL_IND] [nvarchar] (1) NULL ) ON [PRIMARY] IF @@error > 0 BEGIN SET @ERR_MESSAGE='Error in creating temp table STNG39BUDG. Procedure aborted.' 
GOTO error_section END CREATE TABLE [dbo].[STNG39SYST] ( [ID] [nvarchar] (10) NOT NULL , [U_VERSION] [nvarchar] (1) NULL , [WAARDE] [nvarchar] (40) NULL ) ON [PRIMARY] IF @@error > 0 BEGIN SET @ERR_MESSAGE='Error in creating temp table STNG39SYST. Procedure aborted.' GOTO error_section END /************************************************************/ /*STEP 6. LOAD DATA FROM SOLID TO STAGING TABLES WITH DTS. */ /************************************************************/ -- INFO: HOW TO RUN DTS PACKAGES: -- FROM SQL: -- --------- -- exec xp_cmdshell "DTSRun /S servername /U username /P password /Fpackagename" -- Or: -- exec xp_cmdshell "DTSRun /S servername /E /Fpackagename" -- /E : trusted connection, or use /U username /P password exec master.dbo.xp_cmdshell "DTSRun /E /FC:\exp_solid\INIT_STNG39AFGP.dts" exec master.dbo.xp_cmdshell "DTSRun /E /FC:\exp_solid\INIT_STNG39ART.dts" exec master.dbo.xp_cmdshell "DTSRun /E /FC:\exp_solid\INIT_STNG39BUDG.dts" exec master.dbo.xp_cmdshell "DTSRun /E /FC:\exp_solid\INIT_STNG39EENH.dts" exec master.dbo.xp_cmdshell "DTSRun /E /FC:\exp_solid\INIT_STNG39LEV.dts" exec master.dbo.xp_cmdshell "DTSRun /E /FC:\exp_solid\INIT_STNG39MLT.dts" exec master.dbo.xp_cmdshell "DTSRun /E /FC:\exp_solid\INIT_STNG39SYST.dts" exec master.dbo.xp_cmdshell "DTSRun /E /FC:\exp_solid\INIT_STNG39TOES.dts" exec master.dbo.xp_cmdshell "DTSRun /E /FC:\exp_solid\INIT_STNG39USER.dts" IF (SELECT COUNT(*) FROM STNG39ART) = 0 BEGIN SET @ERR_MESSAGE='Error in running DTS datatransfer. Procedure aborted.' GOTO error_section END /************************************************************/ /*STEP 7. CHECK ON DUPLICATE ARTICLES. */ /************************************************************/ -- LET'S FIRST CHECK ON DUPLICATE ARTICLES IN STAGING TABLE STNG39ART. -- IF DUPLICATE ARTICLES ARE FOUND, WE ABORT THIS PROCEDURE -- AND NOTHING WILL BE CHANGED. IF EXISTS (SELECT COUNT(ART_NR) FROM STNG39ART GROUP BY ART_NR HAVING COUNT(*) > 1) BEGIN PRINT 'DUPLICATE ARTICLES FOUND:' SELECT ART_NR FROM STNG39ART GROUP BY ART_NR HAVING COUNT(*) > 1 SET @ERR_MESSAGE='Duplicate Article_numbers found in Staging Artikelentabel. Procedure aborted.' GOTO error_section END /************************************************************/ /*STEP 8. TRANSFORMATIONS */ /************************************************************/ -- SOME TRANSFORMATIONS ARE STILL NEEDED. -- 8.1. SPECIAL CASES OF BIT VALUES "T" AND "F". -- --------------------------------------------- -- STAGING TABLE STNG39AFGP HAS THE COLUMN [AFG_PNT_VAST] (on purpose) SET TO [char] (1) -- BECAUSE THAT IS THE SAME AS THE CORRESPONDING TABLE IN SOLID (NG39AFGP). -- THAT COLUMN CONTAINS VALUES AS "T"AND "F". -- SO HERE WE REBUILD THOSE VALUES TO "1" AND "0". -- A SIMILAR ARGUMENT IS TRUE FOR STNG39LEV. -- STNG39AFGP: -- ------------ IF (SELECT COUNT(*) FROM STNG39AFGP WHERE AFG_PNT_VAST='T') > 0 BEGIN UPDATE STNG39AFGP SET AFG_PNT_VAST ='1' WHERE AFG_PNT_VAST='T' END IF (SELECT COUNT(*) FROM STNG39AFGP WHERE AFG_PNT_VAST='F') > 0 BEGIN UPDATE STNG39AFGP SET AFG_PNT_VAST ='0' WHERE AFG_PNT_VAST='F' END -- STNG39LEV: -- ---------- IF (SELECT COUNT(*) FROM STNG39LEV WHERE LEV_REGMAG='T') > 0 BEGIN UPDATE STNG39LEV SET LEV_REGMAG ='1' WHERE LEV_REGMAG='T' END IF (SELECT COUNT(*) FROM STNG39LEV WHERE LEV_REGMAG='F') > 0 BEGIN UPDATE STNG39LEV SET LEV_REGMAG ='0' WHERE LEV_REGMAG='F' END /****************************************************************************/ /*STEP 9. EMPTY THE INVOLVED PRODUCTION TABLES IN SQL Server. 
*/ /****************************************************************************/ -- 9.1 FIRST LET'S COUNT THE ORIGINAL NO OF RECORDS IN PRODUCTION TABLES FOR REPORTING PURPOSES, -- BEFORE IMPORT TAKES PLACE. -- --------------------------------------------------------------------------------------------- SELECT @_NG39AFGP_B = (SELECT COUNT(*) FROM NG39AFGP) SELECT @_NG39LEV_B = (SELECT COUNT(*) FROM NG39LEV) SELECT @_NG39ART_B = (SELECT COUNT(*) FROM NG39ART) SELECT @_NG39TOES_B = (SELECT COUNT(*) FROM NG39TOES) SELECT @_NG39BUDG_B = (SELECT COUNT(*) FROM NG39BUDG) SELECT @_NG39MLT_B = (SELECT COUNT(*) FROM NG39MLT) SELECT @_NG39USER_B = (SELECT COUNT(*) FROM NG39USER) SELECT @_NG39EENH_B = (SELECT COUNT(*) FROM NG39EENH) SELECT @_NG39SYST_B = (SELECT COUNT(*) FROM NG39SYST) -- 9.2 EMPTY TABLES (MOST LIKELY THEY ARE ALREADY EMPTY) -- ----------------------------------------------------- -- IF YOU HAVE CONCERNS ABOUT THE SIZE OF THE TRANSACTION LOG, YOU COULD INSTEAD USE TRUNCATE STATEMENTS. -- HERE WE PUT THE DELETES INTO A TRANSACTION. -- PRODUCTION TABLES ARE PROBABLY EMPTY TO START WITH. -- WE ONLY EMPTY THE TABLES THAT WILL RECEIVE THE MIGRATED RECORDS FROM SOLID. BEGIN TRAN EMPTY DELETE FROM NG39AFGP DELETE FROM NG39LEV DELETE FROM NG39ART DELETE FROM NG39TOES DELETE FROM NG39BUDG DELETE FROM NG39MLT DELETE FROM NG39USER DELETE FROM NG39EENH DELETE FROM NG39SYST DELETE FROM NG39DGLB IF @@error=0 BEGIN COMMIT TRAN EMPTY END ELSE BEGIN ROLLBACK TRAN EMPTY SET @ERR_MESSAGE='Removing old data from tables did not succeed.' GOTO error_section END /**************************************************************************/ /*STEP 10. NOW COPY THE DATA FROM STAGING TABLES INTO PRODUCTION TABLES. */ /**************************************************************************/ -- --------------------------------------- INSERT INTO NG39AFGP SELECT CONVERT(INT,AFG_PNT_NR), U_VERSION, AFG_PNT_OMS, CONVERT(bit,AFG_PNT_VAST) FROM STNG39AFGP IF @@error > 0 BEGIN SET @ERR_MESSAGE='Error loading NG39AFGP' GOTO error_section END -- --------------------------------------- SET IDENTITY_INSERT NG39LEV ON INSERT INTO NG39LEV (LEV_NR, U_VERSION,LEV_NM,LEV_ADR,LEV_PC,LEV_PLTS,LEV_REGMAG,LEV_BTLND) SELECT CONVERT(int,LEV_NR), U_VERSION, LEV_NM, LEV_ADR, LEV_PC, LEV_PLTS, CONVERT(bit,LEV_REGMAG), 0 FROM STNG39LEV SET IDENTITY_INSERT NG39LEV OFF IF @@error > 0 BEGIN SET @ERR_MESSAGE='Error loading NG39LEV' GOTO error_section END -- --------------------------------------- INSERT INTO NG39ART SELECT ART_NR, U_VERSION, ART_OMS, ART_VRP_EENH, CONVERT(decimal (8,2),ART_AANT), CONVERT(money,CONVERT(float,ART_PR)), ART_BTW, ART_STATUS FROM STNG39ART UPDATE NG39ART SET ART_PR=(ART_PR/100) IF @@error > 0 BEGIN SET @ERR_MESSAGE='Error loading NG39ART' GOTO error_section END -- --------------------------------------- INSERT INTO NG39TOES (TOES_NR,U_VERSION,TOES_BDR) SELECT CONVERT(int,TOES_NR), U_VERSION, CONVERT(money,CONVERT(float,TOES_BDR)) --, -- TOES_OMS, -- TOES_URL FROM STNG39TOES UPDATE NG39TOES SET TOES_BDR=(TOES_BDR/100) IF @@error > 0 BEGIN SET @ERR_MESSAGE='Error loading NG39TOES' GOTO error_section END -- --------------------------------------- INSERT INTO NG39BUDG SELECT CONVERT(int,BUD_PER_NR), U_VERSION, CONVERT(money,CONVERT(float,BUD_BEG_SAL)), CONVERT(money,CONVERT(float,BUD_END_SAL)), BUD_ASL_IND FROM STNG39BUDG IF @@error > 0 BEGIN SET @ERR_MESSAGE='Error loading NG39BUDG' GOTO error_section END -- --------------------------------------- INSERT INTO NG39MLT SELECT CONVERT(int,MLT_NR), U_VERSION, MLT_NM, 
CONVERT(money,CONVERT(float,MLT_PR)) FROM STNG39MLT UPDATE NG39MLT SET MLT_PR=(MLT_PR/100) /* UPDATE NG39MLT SET MLT_PR=(MLT_PR/(100+(EENH_BTW_H/100)) * 100) where ART_BTW = "H"; UPDATE NG39MLT SET MLT_PR=(MLT_PR/(100+(EENH_BTW_L/100)) * 100) where ART_BTW = "L"; */ IF @@error > 0 BEGIN SET @ERR_MESSAGE='Error loading NG39MLT' GOTO error_section END -- --------------------------------------- INSERT INTO NG39EENH SELECT EENH_NM, U_VERSION, VLG_OFF_NM, VLG_OFF_RNG, EENH_CMD_NM, EENH_CMD_RNG, EENH_HLD_NM, EENH_HLD_RNG, OFF_GNKD_NM, OFF_GNKD_RNG, CONVERT(float,EENH_PRG_VRL), CONVERT(float,EENH_BTW_H), CONVERT(float,EENH_BTW_L), CONVERT(smalldatetime,EENH_VERW_DT), CONVERT(smalldatetime,EENH_STER_DT) FROM STNG39EENH UPDATE NG39EENH SET EENH_PRG_VRL=(EENH_PRG_VRL/10), EENH_BTW_H=(EENH_BTW_H/10), EENH_BTW_L=(EENH_BTW_L/10) UPDATE NG39EENH SET VLG_OFF_NM='' WHERE VLG_OFF_NM='UL' UPDATE NG39EENH SET VLG_OFF_RNG='' WHERE VLG_OFF_RNG='UL' IF @@error > 0 BEGIN SET @ERR_MESSAGE='Error loading NG39EENH' GOTO error_section END -- Updaten van Prijzen. Was Incl. BTW nu ex BTW maken. DECLARE @EENH_BTW_H float DECLARE @EENH_BTW_L float SELECT @EENH_BTW_H = (Select EENH_BTW_H from NG39EENH) SELECT @EENH_BTW_L = (Select EENH_BTW_L from NG39EENH) UPDATE NG39ART SET ART_PR = (ART_PR / (100 + @EENH_BTW_H)) * 100 WHERE (ART_BTW = N'H') UPDATE NG39ART SET ART_PR = (ART_PR / (100 + @EENH_BTW_L)) * 100 WHERE (ART_BTW = N'L') -- --------------------------------------- INSERT INTO NG39USER SELECT USR_ID, USR_GROEP, USR_NAAM, 'Y2djWZxBe9M=', --USR_PASSWORD, CONVERT(smalldatetime,'01-01-2002'), 1 -- CONVERT(int,USR_PASS_PER) FROM STNG39USER IF ((SELECT COUNT(usr_id) FROM NG39user where usr_id = 'fsb') = 1) BEGIN PRINT 'FSB user found' END ELSE begin PRINT 'FSB user NOT found' INSERT INTO NG39USER (USR_ID, USR_PASSWORD, USR_PASS_DT, USR_PASS_PER) VALUES (N'FSB', 'i03E6cJh5hmEkqBhqiRl9g==', '1-1-2050', 4) END IF @@error > 0 BEGIN SET @ERR_MESSAGE='Error loading NG39USER' GOTO error_section END -- --------------------------------------- INSERT INTO NG39SYST SELECT * FROM STNG39SYST IF @@error > 0 BEGIN SET @ERR_MESSAGE='Error loading NG39SYST' GOTO error_section END -- NOW LET'S COUNT THE NO OF RECORDS IN PRODUCTION TABLES AFTER IMPORT. -- -------------------------------------------------------------------- SELECT @_NG39AFGP_A = (SELECT COUNT(*) FROM NG39AFGP) SELECT @_NG39LEV_A = (SELECT COUNT(*) FROM NG39LEV) SELECT @_NG39ART_A = (SELECT COUNT(*) FROM NG39ART) SELECT @_NG39TOES_A = (SELECT COUNT(*) FROM NG39TOES) SELECT @_NG39BUDG_A = (SELECT COUNT(*) FROM NG39BUDG) SELECT @_NG39MLT_A = (SELECT COUNT(*) FROM NG39MLT) SELECT @_NG39USER_A = (SELECT COUNT(*) FROM NG39USER) SELECT @_NG39EENH_A = (SELECT COUNT(*) FROM NG39EENH) SELECT @_NG39SYST_A = (SELECT COUNT(*) FROM NG39SYST) -- IF WE DO NOT WANT THE U_VERSION DATA, WE CAN DO THE FOLLOWING: -- UPDATE NG39AFGP SET U_VERSION='NULL' -- UPDATE NG39LEV SET U_VERSION='NULL' -- UPDATE NG39ART SET U_VERSION='NULL' -- UPDATE NG39TOES SET U_VERSION='NULL' -- UPDATE NG39BUDG SET U_VERSION='NULL' -- UPDATE NG39MLT SET U_VERSION='NULL' -- UPDATE NG39USER SET U_VERSION='NULL' -- UPDATE NG39EENH SET U_VERSION='NULL' /***************************************************************************/ /*STEP 12. ENABLING FOREIGN KEYS AGAIN. 
*/ /***************************************************************************/ DECLARE @FK_NG39AFGR_NG39AFG INT DECLARE @FK_NG39AFG_NG39AFGP INT DECLARE @FK_NG39AFGR_NG39ART INT DECLARE @FK_NG39ONTR_NG39ART INT DECLARE @FK_NG39DGLB_NG39BUDG INT DECLARE @FK_NG39SCHF_NG39DGLB INT DECLARE @FK_NG39AFG_NG39DGLB INT DECLARE @FK_NG39DSTK_NG39DGLB INT DECLARE @FK_NG39DTOE_NG39DGLB INT DECLARE @FK_NG39SCHF_NG39DSTK INT DECLARE @FK_NG39ONT_NG39LEV INT DECLARE @FK_NG39DSTK_NG39MLT INT DECLARE @FK_NG39ONTR_NG39ONT INT DECLARE @FK_NG39DTOE_NG39TOES INT -- ----------------------------- -- CREATE FKs IN BBV: -- ----------------------------- ALTER TABLE [dbo].[NG39ONTR] ADD CONSTRAINT [FK_NG39ONTR_NG39ART] FOREIGN KEY ([ART_NR]) REFERENCES [dbo].[NG39ART] ([ART_NR]) ALTER TABLE [dbo].[NG39ONTR] ADD CONSTRAINT [FK_NG39ONTR_NG39ONT] FOREIGN KEY ([ONT_BON_NR]) REFERENCES [dbo].[NG39ONT] ([ONT_BON_NR]) -- ----------------------------- ALTER TABLE [dbo].[NG39AFG] ADD CONSTRAINT [FK_NG39AFG_NG39AFGP] FOREIGN KEY ([AFG_PNT_NR]) REFERENCES [dbo].[NG39AFGP] ([AFG_PNT_NR]) ALTER TABLE [dbo].[NG39AFG] ADD CONSTRAINT [FK_NG39AFG_NG39DGLB] FOREIGN KEY ([DGLB_STER_DT]) REFERENCES [dbo].[NG39DGLB] ([DGLB_STER_DT]) -- ----------------------------- ALTER TABLE [dbo].[NG39AFGR] ADD CONSTRAINT [FK_NG39AFGR_NG39AFG] FOREIGN KEY ([AFG_PNT_NR],[AFG_BON_NR]) REFERENCES [dbo].[NG39AFG] ([AFG_PNT_NR],[AFG_BON_NR]) ALTER TABLE [dbo].[NG39AFGR] ADD CONSTRAINT [FK_NG39AFGR_NG39ART] FOREIGN KEY ([ART_NR]) REFERENCES [dbo].[NG39ART] ([ART_NR]) -- ----------------------------- ALTER TABLE [dbo].[NG39DGLB] ADD CONSTRAINT [FK_NG39DGLB_NG39BUDG] FOREIGN KEY ([BUD_PER_NR]) REFERENCES [dbo].[NG39BUDG] ([BUD_PER_NR]) -- ----------------------------- ALTER TABLE [dbo].[NG39DSTK] ADD CONSTRAINT [FK_NG39DSTK_NG39DGLB] FOREIGN KEY ([DGLB_STER_DT]) REFERENCES [dbo].[NG39DGLB] ([DGLB_STER_DT]) ALTER TABLE [dbo].[NG39DSTK] ADD CONSTRAINT [FK_NG39DSTK_NG39MLT] FOREIGN KEY ([MLT_NR]) REFERENCES [dbo].[NG39MLT] ([MLT_NR]) -- ----------------------------- ALTER TABLE [dbo].[NG39DTOE] ADD CONSTRAINT [FK_NG39DTOE_NG39DGLB] FOREIGN KEY ([DGLB_STER_DT]) REFERENCES [dbo].[NG39DGLB] ([DGLB_STER_DT]) ALTER TABLE [dbo].[NG39DTOE] ADD CONSTRAINT [FK_NG39DTOE_NG39TOES] FOREIGN KEY ([TOES_NR]) REFERENCES [dbo].[NG39TOES] ([TOES_NR]) -- ----------------------------- ALTER TABLE [dbo].[NG39ONT] ADD CONSTRAINT [FK_NG39ONT_NG39LEV] FOREIGN KEY ([LEV_NR]) REFERENCES [dbo].[NG39LEV] ([LEV_NR]) -- ----------------------------- ALTER TABLE [dbo].[NG39SCHF] ADD CONSTRAINT [FK_NG39SCHF_NG39DGLB] FOREIGN KEY ([DGLB_STER_DT]) REFERENCES [dbo].[NG39DGLB] ([DGLB_STER_DT]) ALTER TABLE [dbo].[NG39SCHF] ADD CONSTRAINT [FK_NG39SCHF_NG39DSTK] FOREIGN KEY ([DGLB_STER_DT],[MLT_NR]) REFERENCES [dbo].[NG39DSTK] ([DGLB_STER_DT],[MLT_NR]) -- ----------------------------- /****************************************************************************/ /*STEP 13. REPORT OF THE CONVERSION. 
*/ /****************************************************************************/ SELECT @DIFF_NG39AFGP = (SELECT COUNT(*) FROM NG39AFGP)-(SELECT COUNT(*) FROM STNG39AFGP) SELECT @DIFF_NG39LEV = (SELECT COUNT(*) FROM NG39LEV)-(SELECT COUNT(*) FROM STNG39LEV) SELECT @DIFF_NG39ART = (SELECT COUNT(*) FROM NG39ART)-(SELECT COUNT(*) FROM STNG39ART) SELECT @DIFF_NG39TOES = (SELECT COUNT(*) FROM NG39TOES)-(SELECT COUNT(*) FROM STNG39TOES) SELECT @DIFF_NG39BUDG = (SELECT COUNT(*) FROM NG39BUDG)-(SELECT COUNT(*) FROM STNG39BUDG) SELECT @DIFF_NG39MLT = (SELECT COUNT(*) FROM NG39MLT)-(SELECT COUNT(*) FROM STNG39MLT) SELECT @DIFF_NG39USER = (SELECT COUNT(*) FROM NG39USER)-(SELECT COUNT(*) FROM STNG39USER) SELECT @DIFF_NG39EENH = (SELECT COUNT(*) FROM NG39EENH)-(SELECT COUNT(*) FROM STNG39EENH) SELECT @DIFF_NG39SYST = (SELECT COUNT(*) FROM NG39SYST)-(SELECT COUNT(*) FROM STNG39SYST) PRINT '************* REPORT OF CONVERSION *****************' PRINT ' ' PRINT ' ' PRINT 'NO OF RECORDS IN PRODUCTION TABLES BEFORE IMPORT :' PRINT '-------------------------------------------------------------' PRINT 'NO OF RECORDS IN NG39AFGP :'+ convert(varchar(10),@_NG39AFGP_B) PRINT 'NO OF RECORDS IN NG39LEV :'+ convert(varchar(10),@_NG39LEV_B) PRINT 'NO OF RECORDS IN NG39ART :'+ convert(varchar(10),@_NG39ART_B) PRINT 'NO OF RECORDS IN NG39TOES :'+ convert(varchar(10),@_NG39TOES_B) PRINT 'NO OF RECORDS IN NG39BUDG :'+ convert(varchar(10),@_NG39BUDG_B) PRINT 'NO OF RECORDS IN NG39MLT :'+ convert(varchar(10),@_NG39MLT_B) PRINT 'NO OF RECORDS IN NG39USER :'+ convert(varchar(10),@_NG39USER_B) PRINT 'NO OF RECORDS IN NG39EENH :'+ convert(varchar(10),@_NG39EENH_B) PRINT 'NO OF RECORDS IN NG39SYST :'+ convert(varchar(10),@_NG39SYST_B) PRINT ' ' PRINT 'NO OF RECORDS IN PRODUCTION TABLES AFTER IMPORT :' PRINT '-------------------------------------------------------------' PRINT 'NO OF RECORDS IN NG39AFGP :'+ convert(varchar(10),@_NG39AFGP_A) PRINT 'NO OF RECORDS IN NG39LEV :'+ convert(varchar(10),@_NG39LEV_A) PRINT 'NO OF RECORDS IN NG39ART :'+ convert(varchar(10),@_NG39ART_A) PRINT 'NO OF RECORDS IN NG39TOES :'+ convert(varchar(10),@_NG39TOES_A) PRINT 'NO OF RECORDS IN NG39BUDG :'+ convert(varchar(10),@_NG39BUDG_A) PRINT 'NO OF RECORDS IN NG39MLT :'+ convert(varchar(10),@_NG39MLT_A) PRINT 'NO OF RECORDS IN NG39USER :'+ convert(varchar(10),@_NG39USER_A) PRINT 'NO OF RECORDS IN NG39EENH :'+ convert(varchar(10),@_NG39EENH_A) PRINT 'NO OF RECORDS IN NG39SYST :'+ convert(varchar(10),@_NG39SYST_A) PRINT ' ' PRINT 'NO OF RECORDS IN PRODUCTION TABLES AFTER IMPORT COMPARED TO STAGING TABLES:' PRINT '---------------------------------------------------------------------------' PRINT 'DIFF RECORDS NG39AFGP :'+ convert(varchar(10),@DIFF_NG39AFGP) PRINT 'DIFF RECORDS NG39LEV :'+ convert(varchar(10),@DIFF_NG39LEV) PRINT 'DIFF RECORDS NG39ART :'+ convert(varchar(10),@DIFF_NG39ART) PRINT 'DIFF RECORDS NG39TOES :'+ convert(varchar(10),@DIFF_NG39TOES) PRINT 'DIFF RECORDS NG39BUDG :'+ convert(varchar(10),@DIFF_NG39BUDG) PRINT 'DIFF RECORDS NG39MLT :'+ convert(varchar(10),@DIFF_NG39MLT) PRINT 'DIFF RECORDS NG39USER :'+ convert(varchar(10),@DIFF_NG39USER) PRINT 'DIFF RECORDS NG39EENH :'+ convert(varchar(10),@DIFF_NG39EENH) PRINT 'DIFF RECORDS NG39SYST :'+ convert(varchar(10),@DIFF_NG39SYST) /***************************************************************************/ /*STEP 14. DROP THE STAGING TABLES. 
*/
/***************************************************************************/

DECLARE cur1 CURSOR FOR
SELECT name FROM sysobjects WHERE name like 'ST%' AND xtype='U'
OPEN cur1
FETCH NEXT FROM cur1 INTO @TEMPTAB
WHILE (@@FETCH_STATUS<>-1)
BEGIN
  exec ('DROP TABLE '+@TEMPTAB)
  FETCH NEXT FROM cur1 INTO @TEMPTAB
END
CLOSE cur1
DEALLOCATE cur1
IF @@error > 0
BEGIN
  SET @ERR_MESSAGE='Error: error dropping temporary staging tables.'
  GOTO error_section
END

RETURN

--
error_section:
PRINT @ERR_MESSAGE
RETURN

-- END OF PROCEDURE
GO

----------------------------------------------------------------------
28. Another example of a complete program, implemented as a SP:
----------------------------------------------------------------------

CREATE PROCEDURE IMP @importpath VARCHAR(128) = null
AS
/*******************************************************/
/* VERSION: 7.0 */
/* DATE   : 11-08-2003 */
/* STATUS : FOR TEST/ACCEPTANCE */
/* PURPOSE: LOAD NEW DATA INTO ARTICLE TABLE NG39ART */
/* Author : AvdS */
/* NOTES  : 1.Uses a user supplied path to the upload */
/*            file, as a parameter of this procedure. */
/*          2.Does not use supplied flags for */
/*            update logic. */
/*          3.Logging set to C:\TEMP\LOAD_FILE.LOG */
/*            which can be changed to another location. */
/*          4.The user must save the Excel sheet */
/*            as a tab delimited DOS text file. */
/* ------------------------------------------------- */
/* Changes: */
/* */
/*******************************************************/

SET ANSI_WARNINGS OFF
SET NOCOUNT ON

/*********************************/
/* STEP 1. VARIABLE DECLARATIONS */
/*********************************/

-- VARIABLES USED FOR DATA IMPORT
DECLARE @ART_NR VARCHAR(30)
DECLARE @ART_OMS VARCHAR(30)
DECLARE @ART_VRP_EENH VARCHAR(3)
DECLARE @ART_PR_EX money
DECLARE @ART_PR_IN money
DECLARE @ART_AANT float
DECLARE @FK VARCHAR(128)
DECLARE @REFERENCED VARCHAR(128)

-- VARIABLES USED FOR DECISION/UPDATE LOGIC
DECLARE @ACTION VARCHAR(128)
DECLARE @ACTION_LOG VARCHAR(128)
DECLARE @ACTION_DATE DATETIME
DECLARE @ART_NUM VARCHAR(9)
DECLARE @IF_EXIST_REC INT

-- VARIABLES USED FOR ERROR HANDLING
DECLARE @MESSAGE VARCHAR(128)
DECLARE @RESULT INT
DECLARE @COUNT_BEFORE INT
DECLARE @COUNT_BCP_LOAD INT
DECLARE @COUNT_STAGE_LOAD INT
DECLARE @COUNT_AFTER INT
DECLARE @UPDATES_OK INT
DECLARE @UPDATES_WRONG INT
DECLARE @DELETES INT
DECLARE @HERNUMMERING INT
DECLARE @UPDATE_ALL_FIELDS INT
DECLARE @UPDATE_PRICE INT

-- VARIABLES NEEDED FOR xp_cmdshell
DECLARE @LOGSTRING VARCHAR(128)
DECLARE @LOGFILE VARCHAR(128)
DECLARE @cmd VARCHAR(128)
DECLARE @log_cmd VARCHAR(128)
DECLARE @totalcommand VARCHAR(128)

/*************************************/
/* STEP 2. CHECK ON INPUT PARAMETER. */
/*************************************/

if @importpath IS NULL or (charindex('\', @importpath) = 0)
begin
  PRINT ' *** Description of the load ascii file procedure *** '
  PRINT ' With this procedure an ASCII file with updates'
  PRINT ' of articles can be loaded into the ARTICLE table (NG39ART).'
  PRINT ' '
  PRINT ' The procedure must be used with one parameter.'
  PRINT ' This parameter is the full path, including the filename, '
  PRINT ' of the ASCII file.'
  PRINT ' Use the procedure as in the example below: '
  PRINT ' IMP "D:\SOURCE\UPLOAD.TXT" '
  RETURN
end

/**************************************************************/
/* STEP 3.
CREATE TEMP TABLES */ /* There will be 2 temporary tables: */ /* */ /* -- ##BCP_LOAD for the initial bulk copy load */ /* -- ##STAGE_LOAD as a copy of BCP_LOAD with errors removed */ /**************************************************************/ -- FIRST WE CREATE A TEMP TABLE ##BCP_LOAD (in tempdb) -- IN ORDER TO LOAD THE DATA FROM THE UPLOAD FILE if exists (select * from tempdb..sysobjects where name like '%##BCP_LOAD%') BEGIN DROP TABLE ##BCP_LOAD END IF @@error > 0 BEGIN SET @MESSAGE='Error with existence or creation of temporary load table.' GOTO error_section END CREATE TABLE ##BCP_LOAD ( [ART_NR] nvarchar(10), [ART_OMS] nvarchar(55), [ART_VRP_EENH] nvarchar(10), [ART_PR_EX] varchar(30), [ART_PR_IN] varchar(30) ) -- SECONDLY WE CREATE A TEMP TABLE ##STAGE_LOAD (in tempdb) -- WHICH WILL HOLD A COPY OF BCP_LOAD WITH POSSIBLE ERRORS REMOVED if exists (select * from tempdb..sysobjects where name like '%##STAGE_LOAD%') BEGIN DROP TABLE ##STAGE_LOAD END IF @@error > 0 BEGIN SET @MESSAGE='Error with existence or creation of temporary load table.' GOTO error_section END CREATE TABLE ##STAGE_LOAD ( [ART_NR] nvarchar(10), [ART_OMS] nvarchar(55), [ART_VRP_EENH] nvarchar(10), [ART_PR_EX] money, [ART_PR_IN] money ) /*******************************************************/ /* STEP 4. NOW LOAD DATA FROM ASCII FILE IN TEMP TABLE */ /*******************************************************/ -- In the bcp command use the -T switch for trusted connection, -- or -Uusername -Ppassword for SQL authentication. -- We use the -F2 switch to skip the first record = heading SELECT @totalcommand='bcp ##BCP_LOAD in '+@importpath+' -c -F2 -T' EXEC @RESULT = master.dbo.xp_cmdshell @totalcommand IF (@RESULT <> 0) BEGIN SET @MESSAGE='Error loading data in temporary table. Possibly wrong path or file not found.' GOTO error_section END SELECT @COUNT_BCP_LOAD=(SELECT COUNT(*) FROM ##BCP_LOAD) /********************************************************************/ /* STEP 5. NOW COPY THE DATA INTO STAGING TABLE AND REMOVING ERRORS */ /********************************************************************/ -- NOW COPY ##BCP_LOAD INTO THE ##STAGE_LOAD TABLE -- WHERE WE ALSO CHECK AND CORRECT FORMATTING CONDITIONS DELETE FROM ##BCP_LOAD WHERE ART_NR IS NULL DELETE FROM ##BCP_LOAD WHERE ART_NR=' ' INSERT INTO ##STAGE_LOAD SELECT ART_NR, LTRIM(ART_OMS), LTRIM(ART_VRP_EENH), convert(money,ART_PR_EX), convert(money, ART_PR_IN) FROM ##BCP_LOAD /********************************************************************/ /* STEP 6. CHECK ON DUPLICATE ART_NR's AND NULL VALUES */ /********************************************************************/ -- NOW CHECK ON DUPLICATE ARTICLES. -- IF DUPLICATE ARTICLES ARE FOUND, WE ABORT THIS PROCEDURE -- AND NOTHING WILL BE CHANGED. IF EXISTS (SELECT COUNT(ART_NR) FROM ##STAGE_LOAD GROUP BY ART_NR HAVING COUNT(*) > 1) BEGIN PRINT 'DUPLICATE ARTICLES FOUND:' SELECT ART_NR FROM ##STAGE_LOAD GROUP BY ART_NR HAVING COUNT(*) > 1 SET @MESSAGE='Duplicate Article_numbers found in uploadfile. Procedure aborted. Nothing changed in database.' GOTO error_section END -- NOW CHECK ON NULL VALUES. -- IF NULL VALUES ARE FOUND, WE ABORT THIS PROCEDURE -- AND NOTHING WILL BE CHANGED. 
IF (SELECT COUNT(*) FROM ##STAGE_LOAD WHERE ART_OMS IS NULL) >0 OR (SELECT COUNT(*) FROM ##STAGE_LOAD WHERE ART_VRP_EENH IS NULL) >0 OR (SELECT COUNT(*) FROM ##STAGE_LOAD WHERE ART_PR_EX IS NULL) >0 OR (SELECT COUNT(*) FROM ##STAGE_LOAD WHERE ART_PR_IN IS NULL) >0 BEGIN PRINT 'NULL VALUES FOUND:' SELECT ART_NR FROM ##STAGE_LOAD WHERE (ART_OMS IS NULL) OR (ART_VRP_EENH IS NULL) OR (ART_PR_EX IS NULL) OR (ART_PR_IN IS NULL) SET @MESSAGE='NULL values found in uploadfile. Procedure aborted. Nothing changed in database.' GOTO error_section END /***********************************************************/ /* STEP 7. COUNT NUMBER OF RECORDS BEFORE IMPORT */ /***********************************************************/ SELECT @COUNT_BEFORE=(SELECT COUNT(*) FROM NG39ART) SELECT @COUNT_STAGE_LOAD=(SELECT COUNT(*) FROM ##STAGE_LOAD) SELECT @UPDATES_OK=0 SELECT @UPDATES_WRONG=0 SELECT @DELETES=0 SELECT @HERNUMMERING=0 SELECT @UPDATE_ALL_FIELDS=0 SELECT @UPDATE_PRICE=0 /*********************************************************/ /* STEP 8. PROCESS DATA FROM TEMP TABLE TO ARTICLE TABLE */ /*********************************************************/ -- HERE WE ASSUME WE DO NOT HAVE TROUBLE WITH FK CONSTRAINTS -- FROM NG39AFGR AND NG39ONT. -- ELSE, WE DROP OR NOCHECK THE FK CONSTRAINTS AND REBUILD OR CHECK THEM LATER. --ALTER TABLE NG39AFGR DROP CONSTRAINT FK_NG39AFGR_NG39ART --ALTER TABLE NG39ONTR DROP CONSTRAINT FK_NG39ONTR_NG39ART SELECT @ACTION_DATE=GETDATE() -- PART 1. REGULAR PROCESSING -- --------------------------- DECLARE cur1 CURSOR FOR SELECT * FROM ##STAGE_LOAD OPEN cur1 FETCH NEXT FROM cur1 INTO @ART_NR, @ART_OMS, @ART_VRP_EENH, @ART_PR_EX, @ART_PR_IN WHILE (@@FETCH_STATUS<>-1) BEGIN SELECT @IF_EXIST_REC=(SELECT COUNT(*) FROM NG39ART WHERE NG39ART.ART_NR=@ART_NR) SELECT @ART_AANT=(SELECT COUNT(ART_AANT) FROM NG39ART WHERE NG39ART.ART_NR=@ART_NR) IF @IF_EXIST_REC = 0 -- this is a new record BEGIN INSERT INTO NG39ART (ART_NR,ART_OMS,ART_VRP_EENH,ART_PR) VALUES (@ART_NR, UPPER(@ART_OMS), @ART_VRP_EENH, @ART_PR_IN) -- EVALUATE ART_BTW AT INSERTS ONLY, LOOK AT ART_NR, for 1,3xxxx=H, 2,4,5,6xxxx=L IF SUBSTRING(@ART_NR, 1, 1) IN ('1','3') BEGIN UPDATE NG39ART SET ART_BTW='H' WHERE ART_NR=@ART_NR END IF SUBSTRING(@ART_NR, 1, 1) IN ('2','4','5','6') BEGIN UPDATE NG39ART SET ART_BTW='L' WHERE ART_NR=@ART_NR END SET @ACTION='ACTION: INSERT OF NEW RECORD, ' END IF @IF_EXIST_REC=1 -- this is an update BEGIN BEGIN TRAN upd IF SUBSTRING(@ART_NR,1,1) = '2' BEGIN IF @ART_AANT=0 BEGIN UPDATE NG39ART SET NG39ART.ART_OMS=@ART_OMS, NG39ART.ART_VRP_EENH=@ART_VRP_EENH, NG39ART.ART_PR=@ART_PR_IN FROM NG39ART WHERE NG39ART.ART_NR=@ART_NR SET @ACTION='ACTION: UPDATE ALLE KENMERKEN RECORD, ' SELECT @UPDATE_ALL_FIELDS=@UPDATE_ALL_FIELDS+1 END IF @ART_AANT >0 BEGIN UPDATE NG39ART SET NG39ART.ART_PR=@ART_PR_IN FROM NG39ART WHERE NG39ART.ART_NR=@ART_NR SET @ACTION='ACTION: UPDATE PRIJS RECORD, ' SELECT @UPDATE_PRICE=@UPDATE_PRICE+1 END END IF @@error=0 BEGIN COMMIT TRAN upd SELECT @UPDATES_OK=@UPDATES_OK+1 END ELSE BEGIN ROLLBACK TRAN upd SELECT @UPDATES_WRONG=@UPDATES_WRONG+1 END END SELECT @LOGSTRING=@ART_NR+' :'+@ACTION+' '+CONVERT(VARCHAR(64),@ACTION_DATE) SELECT @log_cmd='echo'+' '+@LOGSTRING+' >> C:\TEMP\LOAD_FILE.LOG' EXEC master.dbo.xp_cmdshell @log_cmd FETCH NEXT FROM cur1 INTO @ART_NR, @ART_OMS, @ART_VRP_EENH, @ART_PR_EX, @ART_PR_IN END CLOSE cur1 DEALLOCATE cur1 -- PART 2: LOOK FOR ART_NR's IN NG39ART WHICH NOT EXISTS IN BCP_LOAD -- ----------------------------------------------------------------- DECLARE cur1 
CURSOR FOR SELECT ART_NR, ART_AANT FROM NG39ART OPEN cur1 FETCH NEXT FROM cur1 INTO @ART_NR, @ART_AANT WHILE (@@FETCH_STATUS<>-1) BEGIN SELECT @IF_EXIST_REC=(SELECT COUNT(*) FROM ##STAGE_LOAD WHERE ART_NR=@ART_NR) IF @IF_EXIST_REC=0 BEGIN IF @ART_AANT=0 BEGIN DELETE FROM NG39ART WHERE ART_NR=@ART_NR SET @ACTION='ACTION: DELETE OF RECORD, ' SELECT @DELETES=@DELETES+1 END IF @ART_AANT <>0 BEGIN SELECT @ART_NUM=SUBSTRING(@ART_NR, 2, 7) SET @ART_NUM='7'+@ART_NUM UPDATE NG39ART SET ART_NR=@ART_NUM WHERE ART_NR=@ART_NR SET @ACTION='ACTION: HERNUMMERING NAAR 7 ARTIKEL ' SELECT @HERNUMMERING=@HERNUMMERING+1 END END SELECT @LOGSTRING=@ART_NR+' :'+@ACTION+' '+CONVERT(VARCHAR(64),@ACTION_DATE) SELECT @log_cmd='echo'+' '+@LOGSTRING+' >> C:\TEMP\LOAD_FILE.LOG' EXEC master.dbo.xp_cmdshell @log_cmd FETCH NEXT FROM cur1 INTO @ART_NR, @ART_AANT END CLOSE cur1 DEALLOCATE cur1 -- CLEANUP OF TEMP STRUCTURES DROP TABLE ##BCP_LOAD DROP TABLE ##STAGE_LOAD -- COUNT NO OF RECORDS IN NG39ART AFTER UPLOAD SELECT @COUNT_AFTER=(SELECT COUNT(*) FROM NG39ART) -- END OF PROCEDURE PRINT '******************************************************************' PRINT 'UPDATE OF ARTICLES COMPLETED SUCCESFULLY.' PRINT '----------------------------------' PRINT 'NO OF RECORDS BEFORE IMPORT :'+ convert(varchar(10),@COUNT_BEFORE) PRINT 'NO OF RECORDS AFTER IMPORT :'+ convert(varchar(10),@COUNT_AFTER) PRINT '------------------------------------' PRINT 'NO OF RECORDS IN BCP_LOAD TABLE :'+ convert(varchar(10),@COUNT_BCP_LOAD) PRINT 'NO OF RECORDS IN STAGE_LOAD TABLE :'+ convert(varchar(10),@COUNT_STAGE_LOAD) PRINT 'NO OF RECORDS DISCARDED IN LOADFILE :'+ convert(varchar(10),@COUNT_BCP_LOAD-@COUNT_STAGE_LOAD) PRINT 'NO OF RECORDS IN ERROR IN UPDATE :'+ convert(varchar(10),@UPDATES_WRONG) PRINT 'NO OF RECORDS DELETED :'+ convert(varchar(10),@DELETES) PRINT 'NO OF RECORDS CHANGED TO 7xx :'+ convert(varchar(10),@HERNUMMERING) PRINT 'NO OF RECORDS ALL FIELDS CHANGED :'+ convert(varchar(10),@UPDATE_ALL_FIELDS) PRINT 'NO OF RECORDS PRICE CHANGED :'+ convert(varchar(10),@UPDATE_PRICE) PRINT 'Dif change all fields - change price :'+ convert(varchar(10),@UPDATE_ALL_FIELDS-@UPDATE_PRICE) PRINT '******************************************************************' RETURN error_section: PRINT @MESSAGE RETURN GO
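Usage example of this procedure (the path is only an example; the upload file must be a tab delimited text file saved from Excel, as described in the header notes):

-- load an upload file into NG39ART
EXEC IMP 'D:\SOURCE\UPLOAD.TXT'

-- calling it without a parameter only prints the usage text and returns
EXEC IMP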