sql
stringlengths 6
1.05M
|
---|
<filename>test/regexp_replace-test.sql
-- Test suite for the regexp_replace() SQL function provided by the
-- loadable extension built at dist/re.
-- Each SELECT prints a test id and the comparison result (1 = pass, 0 = fail).
.read test/_settings.sql
.load dist/re
-- Basic replacement, and no-op when the pattern does not match anywhere.
select '01', regexp_replace('the year is 2021', '[0-9]+', '2050') = 'the year is 2050';
select '02', regexp_replace('the year is 2021', '2k21', '2050') = 'the year is 2021';
-- Anchors: '$' must replace only the trailing occurrence, '^' only the leading one.
select '03', regexp_replace('10 10 10', '10$', '') = '10 10 ';
select '04', regexp_replace('10 10 10', '^10', '') = ' 10 10';
-- Replacement at the START of the string with empty / shorter / longer replacements.
select '05', regexp_replace('hello', 'h', '') = 'ello';
select '06', regexp_replace('hello', 'h', '.') = '.ello';
select '07', regexp_replace('hello', 'h', '..') = '..ello';
-- Replacement in the MIDDLE of the string.
select '08', regexp_replace('hello', 'e', '') = 'hllo';
select '09', regexp_replace('hello', 'e', '.') = 'h.llo';
select '10', regexp_replace('hello', 'e', '..') = 'h..llo';
-- Replacement at the END of the string.
select '11', regexp_replace('hello', 'o', '') = 'hell';
select '12', regexp_replace('hello', 'o', '.') = 'hell.';
select '13', regexp_replace('hello', 'o', '..') = 'hell..';
|
-- Migration (PostgreSQL): replace the old primary key of
-- kooste.tamperewfs_luontorastit with a new surrogate identity column.
-- Fix: IF EXISTS guards make the destructive steps idempotent, so the
-- migration can be re-run after a partial failure without erroring out
-- on already-dropped objects.
ALTER TABLE kooste.tamperewfs_luontorastit DROP CONSTRAINT IF EXISTS tamperewfs_luontorastit_pk;
ALTER TABLE kooste.tamperewfs_luontorastit DROP CONSTRAINT IF EXISTS tunnus_rasti_unique;
ALTER TABLE kooste.tamperewfs_luontorastit DROP COLUMN IF EXISTS id;
ALTER TABLE kooste.tamperewfs_luontorastit ADD COLUMN kooste_id bigint NOT NULL GENERATED ALWAYS AS IDENTITY;
ALTER TABLE kooste.tamperewfs_luontorastit ADD CONSTRAINT tamperewfs_luontorastit_pk PRIMARY KEY (kooste_id);
|
<reponame>Dragontalker/MySQL-study-notes
#视图
/*
含义: 虚拟表, 和普通表一样使用
mysql5.1版本出现的新特性, 是通过表动态生成的数据
比如: 舞蹈班和普通班的对比
创建语法的关键字 是否实际占用物理空间 使用
视图 create view 只保存了sql逻辑 增删改查, 一般不能增删改
表 create table 保存了数据 增删改查
*/
#Example: list the name and major of students whose SURNAME is Zhang (张).
#Fix: Chinese surnames come first, so "surname Zhang" means the name
#STARTS with 张; the original pattern '%张' matched names ENDING in 张.
SELECT stuname, major_name
FROM stuinfo AS s
INNER JOIN major AS m
ON s.major_id = m.id
WHERE s.stuname LIKE '张%';
#Create the view (no WHERE clause: consumers apply their own filters).
CREATE VIEW v1
AS
SELECT stuname, major_name
FROM stuinfo AS s
INNER JOIN major AS m
ON s.major_id = m.id;
#Use the view.
#Fix 1: the alias `s` exists only INSIDE the view definition; the outer
#query must reference the view's own column names, so `s.stuname` was an
#"unknown column" error.
#Fix 2: prefix match '张%' for "surname Zhang" (surname comes first).
SELECT * FROM v1 WHERE stuname LIKE '张%';
#一、创建视图
/*
语法:
create view 视图名
as
查询语句;
*/
#1. Employees whose name contains the letter 'a': name, department and job title.
#(1) Define a reusable view that performs the three-way join once.
CREATE VIEW myv1
AS
SELECT last_name, department_name, job_title
FROM employees AS emp
INNER JOIN departments AS dep
ON dep.department_id = emp.department_id
INNER JOIN jobs AS job
ON job.job_id = emp.job_id;
#(2) Query the view; the row filter stays outside the view definition.
SELECT *
FROM myv1
WHERE last_name LIKE '%a%';
#2. Salary grade of each department's average salary.
#(1) View: one row per department with its average salary (exposed as `ag`).
CREATE VIEW myv2
AS
SELECT AVG(salary) AS ag, department_id
FROM employees
GROUP BY department_id;
#(2) Map each department's average onto the matching salary-grade band.
SELECT v.ag, g.grade_level
FROM myv2 AS v
INNER JOIN job_grades AS g
ON v.ag BETWEEN g.lowest_sal AND g.highest_sal;
#3. The myv2 row for the department with the lowest average salary.
SELECT *
FROM myv2
ORDER BY ag ASC
LIMIT 1;
#4. Name and average salary of that lowest-paid department,
#   captured as a view so it can be joined against `departments`.
CREATE VIEW myv3
AS
SELECT *
FROM myv2
ORDER BY ag ASC
LIMIT 1;
SELECT d.*, low.ag
FROM myv3 AS low
INNER JOIN departments AS d
ON d.department_id = low.department_id;
#II. Modifying an existing view
/*
Option 1 syntax (creates the view if absent, replaces it otherwise):
    create or replace view <view_name>
    as
    <select statement>;
*/
SELECT * FROM myv3;
#NOTE(review): AVG(salary) is left unaliased, so the resulting view
#column is literally named "AVG(salary)"; an alias would be cleaner.
CREATE OR REPLACE VIEW myv3
AS
SELECT AVG(salary), job_id
FROM employees
GROUP BY job_id;
/*
Option 2 syntax (view must already exist):
    alter view <view_name>
    as
    <select statement>;
*/
ALTER VIEW myv3
AS
SELECT * FROM myv2;
#III. Dropping views
/*
Syntax: drop view <name1>, <name2>, ...;
*/
#IV. Inspecting views
#Fix: the inspection statements must run BEFORE the DROP — in the
#original order, DESC and SHOW CREATE VIEW ran on an already-dropped
#myv1 and failed with "table doesn't exist".
DESC myv1;
SHOW CREATE VIEW myv1;
DROP VIEW myv1, myv2, myv3;
#V. Updating DATA through a view
#(a view over plain base-table columns is updatable; the DML below is
#applied to the underlying `employees` table)
CREATE OR REPLACE VIEW myv1
AS
SELECT last_name, email, salary*12*(1+IFNULL(commission_pct, 0)) AS "annual salary"
FROM employees;
#Immediately redefined: the computed "annual salary" column would make
#inserts impossible, so the demo narrows the view to two plain columns.
CREATE OR REPLACE VIEW myv1
AS
SELECT last_name, email
FROM employees;
SELECT * FROM myv1;
#1. Insert through the view (inserts a row into `employees`).
#NOTE(review): INSERT without an explicit column list relies on the
#view's column order — fragile if the view definition changes.
INSERT INTO myv1
VALUES ('张飞', '<EMAIL>');
#2. Update through the view.
UPDATE myv1
SET last_name = '张无忌'
WHERE last_name = '张飞';
#3. Delete through the view.
DELETE FROM myv1
WHERE last_name = '张无忌';
#Views with the following traits are NOT updatable
/*
(1) The defining sql statement contains any of:
- aggregate (group) functions
- distinct
- group by
- having
- union
- union all
*/
CREATE OR REPLACE VIEW myv1
AS
SELECT MAX(salary) as m, department_id
FROM employees
GROUP BY department_id;
SELECT * FROM myv1;
#Attempted update — expected to FAIL, because the view aggregates
#(MAX + GROUP BY) and MySQL cannot map the change back to base rows.
UPDATE myv1
SET m = 9000
WHERE department_id = 10;
#(2) Constant views (no base table) are not updatable either.
CREATE OR REPLACE VIEW myv2
AS
SELECT 'john' NAME;
#This UPDATE is expected to fail with "not updatable". Fix: removed a
#stray '|' token after the statement that made it a syntax error before
#it could even demonstrate the intended failure.
UPDATE myv2
SET NAME = 'lucy';
<gh_stars>0
EXEC [EST].[Proc_yjbb_Ins] @Code = N'603566',@CutoffDate = N'2017-09-30',@EPS = N'0.29',@EPSDeduct = N'0',@Revenue = N'3.81亿',@RevenueYoy = N'-10.70',@RevenueQoq = N'-2.53',@Profit = N'9242.25万',@ProfitYoy = N'-40.83',@ProfiltQoq = N'6.71',@NAVPerUnit = N'4.7114',@ROE = N'6.17',@CashPerUnit = N'0.2079',@GrossProfitRate = N'69.62',@Distribution = N'-',@DividenRate = N'-',@AnnounceDate = N'2017-10-17'
EXEC [EST].[Proc_yjbb_Ins] @Code = N'603566',@CutoffDate = N'2017-03-31',@EPS = N'0.1',@EPSDeduct = N'0',@Revenue = N'1.32亿',@RevenueYoy = N'13.46',@RevenueQoq = N'-15.41',@Profit = N'3337.20万',@ProfitYoy = N'-23.72',@ProfiltQoq = N'4.96',@NAVPerUnit = N'4.6947',@ROE = N'2.23',@CashPerUnit = N'-0.0193',@GrossProfitRate = N'71.61',@Distribution = N'-',@DividenRate = N'-',@AnnounceDate = N'2017-04-29'
EXEC [EST].[Proc_yjbb_Ins] @Code = N'603566',@CutoffDate = N'2016-09-30',@EPS = N'0.49',@EPSDeduct = N'0',@Revenue = N'4.27亿',@RevenueYoy = N'17.99',@RevenueQoq = N'32.92',@Profit = N'1.56亿',@ProfitYoy = N'33.31',@ProfiltQoq = N'42.15',@NAVPerUnit = N'4.5169',@ROE = N'11.21',@CashPerUnit = N'0.3199',@GrossProfitRate = N'71.79',@Distribution = N'-',@DividenRate = N'-',@AnnounceDate = N'2017-10-17'
EXEC [EST].[Proc_yjbb_Ins] @Code = N'603566',@CutoffDate = N'2017-06-30',@EPS = N'0.19',@EPSDeduct = N'0.17',@Revenue = N'2.58亿',@RevenueYoy = N'3.41',@RevenueQoq = N'-4.68',@Profit = N'6193.85万',@ProfitYoy = N'-31.33',@ProfiltQoq = N'-14.40',@NAVPerUnit = N'4.6001',@ROE = N'4.13',@CashPerUnit = N'0.0802',@GrossProfitRate = N'70.34',@Distribution = N'不分配不转增',@DividenRate = N'-',@AnnounceDate = N'2017-08-18'
EXEC [EST].[Proc_yjbb_Ins] @Code = N'603566',@CutoffDate = N'2016-03-31',@EPS = N'0.14',@EPSDeduct = N'0',@Revenue = N'1.17亿',@RevenueYoy = N'-10.08',@RevenueQoq = N'0.12',@Profit = N'4375.06万',@ProfitYoy = N'-6.41',@ProfiltQoq = N'78.11',@NAVPerUnit = N'8.6310',@ROE = N'3.22',@CashPerUnit = N'0.1391',@GrossProfitRate = N'70.73',@Distribution = N'-',@DividenRate = N'-',@AnnounceDate = N'2017-04-29'
EXEC [EST].[Proc_yjbb_Ins] @Code = N'603566',@CutoffDate = N'2016-06-30',@EPS = N'0.28',@EPSDeduct = N'0.18',@Revenue = N'2.50亿',@RevenueYoy = N'11.81',@RevenueQoq = N'14.29',@Profit = N'9019.17万',@ProfitYoy = N'27.68',@ProfiltQoq = N'6.15',@NAVPerUnit = N'4.3106',@ROE = N'6.56',@CashPerUnit = N'0.2202',@GrossProfitRate = N'69.99',@Distribution = N'不分配不转增',@DividenRate = N'-',@AnnounceDate = N'2017-08-18'
EXEC [EST].[Proc_yjbb_Ins] @Code = N'603566',@CutoffDate = N'2016-12-31',@EPS = N'0.59',@EPSDeduct = N'0.44',@Revenue = N'5.83亿',@RevenueYoy = N'21.95',@RevenueQoq = N'-11.71',@Profit = N'1.88亿',@ProfitYoy = N'32.64',@ProfiltQoq = N'-51.84',@NAVPerUnit = N'4.5744',@ROE = N'13.40',@CashPerUnit = N'0.7816',@GrossProfitRate = N'71.31',@Distribution = N'10派2',@DividenRate = N'0.83',@AnnounceDate = N'2017-04-17'
EXEC [EST].[Proc_yjbb_Ins] @Code = N'603566',@CutoffDate = N'2015-12-31',@EPS = N'0.49',@EPSDeduct = N'0.44',@Revenue = N'4.78亿',@RevenueYoy = N'0.51',@RevenueQoq = N'-15.86',@Profit = N'1.42亿',@ProfitYoy = N'1.06',@ProfiltQoq = N'-47.22',@NAVPerUnit = N'8.3576',@ROE = N'13.62',@CashPerUnit = N'1.1693',@GrossProfitRate = N'72.73',@Distribution = N'10转10派3',@DividenRate = N'1.29',@AnnounceDate = N'2017-04-17'
EXEC [EST].[Proc_yjbb_Ins] @Code = N'603566',@CutoffDate = N'2015-03-31',@EPS = N'0.39',@EPSDeduct = N'0',@Revenue = N'1.30亿',@RevenueYoy = N'-',@RevenueQoq = N'23.03',@Profit = N'4674.54万',@ProfitYoy = N'-',@ProfiltQoq = N'33.51',@NAVPerUnit = N'0.0000',@ROE = N'6.61',@CashPerUnit = N'0.0000',@GrossProfitRate = N'77.46',@Distribution = N'-',@DividenRate = N'-',@AnnounceDate = N'2016-04-30'
EXEC [EST].[Proc_yjbb_Ins] @Code = N'603566',@CutoffDate = N'2014-12-31',@EPS = N'1.17',@EPSDeduct = N'0.55',@Revenue = N'4.76亿',@RevenueYoy = N'0.76',@RevenueQoq = N'-33.07',@Profit = N'1.40亿',@ProfitYoy = N'-13.88',@ProfiltQoq = N'-33.02',@NAVPerUnit = N'5.6966',@ROE = N'22.32',@CashPerUnit = N'1.2601',@GrossProfitRate = N'74.95',@Distribution = N'-',@DividenRate = N'-',@AnnounceDate = N'2016-04-09'
EXEC [EST].[Proc_yjbb_Ins] @Code = N'603566',@CutoffDate = N'2015-06-30',@EPS = N'0.56',@EPSDeduct = N'0.5',@Revenue = N'2.23亿',@RevenueYoy = N'4.90',@RevenueQoq = N'-27.67',@Profit = N'7063.62万',@ProfitYoy = N'33.34',@ProfiltQoq = N'-48.89',@NAVPerUnit = N'7.9132',@ROE = N'9.05',@CashPerUnit = N'0.4053',@GrossProfitRate = N'74.64',@Distribution = N'不分配不转增',@DividenRate = N'-',@AnnounceDate = N'2016-08-19'
EXEC [EST].[Proc_yjbb_Ins] @Code = N'603566',@CutoffDate = N'2013-12-31',@EPS = N'1.36',@EPSDeduct = N'1.2',@Revenue = N'4.72亿',@RevenueYoy = N'13.07',@RevenueQoq = N'-',@Profit = N'1.63亿',@ProfitYoy = N'26.04',@ProfiltQoq = N'-',@NAVPerUnit = N'5.0277',@ROE = N'31.21',@CashPerUnit = N'1.5101',@GrossProfitRate = N'73.57',@Distribution = N'-',@DividenRate = N'-',@AnnounceDate = N'2015-03-11'
EXEC [EST].[Proc_yjbb_Ins] @Code = N'603566',@CutoffDate = N'2012-12-31',@EPS = N'1.08',@EPSDeduct = N'1.01',@Revenue = N'4.18亿',@RevenueYoy = N'1.19',@RevenueQoq = N'-',@Profit = N'1.29亿',@ProfitYoy = N'11.99',@ProfiltQoq = N'-',@NAVPerUnit = N'3.6706',@ROE = N'34.38',@CashPerUnit = N'1.2330',@GrossProfitRate = N'71.24',@Distribution = N'-',@DividenRate = N'-',@AnnounceDate = N'2015-03-11'
EXEC [EST].[Proc_yjbb_Ins] @Code = N'603566',@CutoffDate = N'2012-06-30',@EPS = N'0.49',@EPSDeduct = N'0.47',@Revenue = N'1.92亿',@RevenueYoy = N'-',@RevenueQoq = N'-',@Profit = N'5834.19万',@ProfitYoy = N'-',@ProfiltQoq = N'-',@NAVPerUnit = N'3.0800',@ROE = N'17.14',@CashPerUnit = N'0.2274',@GrossProfitRate = N'69.69',@Distribution = N'-',@DividenRate = N'-',@AnnounceDate = N'2012-08-23'
EXEC [EST].[Proc_yjbb_Ins] @Code = N'603566',@CutoffDate = N'2011-12-31',@EPS = N'0.96',@EPSDeduct = N'0.93',@Revenue = N'4.13亿',@RevenueYoy = N'41.14',@RevenueQoq = N'-',@Profit = N'1.15亿',@ProfitYoy = N'37.37',@ProfiltQoq = N'-',@NAVPerUnit = N'2.5900',@ROE = N'45.50',@CashPerUnit = N'1.2712',@GrossProfitRate = N'69.20',@Distribution = N'-',@DividenRate = N'-',@AnnounceDate = N'2014-06-30'
EXEC [EST].[Proc_yjbb_Ins] @Code = N'603566',@CutoffDate = N'2010-12-31',@EPS = N'0',@EPSDeduct = N'0',@Revenue = N'2.92亿',@RevenueYoy = N'9.73',@RevenueQoq = N'-',@Profit = N'8399.06万',@ProfitYoy = N'-9.45',@ProfiltQoq = N'-',@NAVPerUnit = N'3.2647',@ROE = N'28.98',@CashPerUnit = N'1.1830',@GrossProfitRate = N'71.33',@Distribution = N'-',@DividenRate = N'-',@AnnounceDate = N'2012-08-23'
EXEC [EST].[Proc_yjbb_Ins] @Code = N'603566',@CutoffDate = N'2009-12-31',@EPS = N'0',@EPSDeduct = N'0',@Revenue = N'2.66亿',@RevenueYoy = N'-',@RevenueQoq = N'-',@Profit = N'9275.90万',@ProfitYoy = N'-',@ProfiltQoq = N'-',@NAVPerUnit = N'6.9183',@ROE = N'44.75',@CashPerUnit = N'1.4183',@GrossProfitRate = N'72.33',@Distribution = N'-',@DividenRate = N'-',@AnnounceDate = N'2012-08-23'
EXEC [EST].[Proc_yjbb_Ins] @Code = N'603566',@CutoffDate = N'2014-09-30',@EPS = N'0.88',@EPSDeduct = N'0',@Revenue = N'3.70亿',@RevenueYoy = N'-',@RevenueQoq = N'-',@Profit = N'1.05亿',@ProfitYoy = N'-',@ProfiltQoq = N'-',@NAVPerUnit = N'0.0000',@ROE = N'17.09',@CashPerUnit = N'0.0000',@GrossProfitRate = N'76.27',@Distribution = N'-',@DividenRate = N'-',@AnnounceDate = N'2015-10-23'
EXEC [EST].[Proc_yjbb_Ins] @Code = N'603566',@CutoffDate = N'2015-09-30',@EPS = N'0.43',@EPSDeduct = N'0',@Revenue = N'3.62亿',@RevenueYoy = N'-2.32',@RevenueQoq = N'47.58',@Profit = N'1.17亿',@ProfitYoy = N'11.34',@ProfiltQoq = N'94.81',@NAVPerUnit = N'8.2041',@ROE = N'12.29',@CashPerUnit = N'0.5208',@GrossProfitRate = N'74.27',@Distribution = N'-',@DividenRate = N'-',@AnnounceDate = N'2016-10-28'
EXEC [EST].[Proc_yjbb_Ins] @Code = N'603566',@CutoffDate = N'2014-06-30',@EPS = N'0.44',@EPSDeduct = N'0.4',@Revenue = N'2.13亿',@RevenueYoy = N'-',@RevenueQoq = N'-',@Profit = N'5297.41万',@ProfitYoy = N'-',@ProfiltQoq = N'-',@NAVPerUnit = N'0.0000',@ROE = N'8.83',@CashPerUnit = N'0.0000',@GrossProfitRate = N'76.00',@Distribution = N'-',@DividenRate = N'-',@AnnounceDate = N'2015-08-20' |
<filename>sure(4).sql
-- phpMyAdmin SQL Dump
-- version 4.6.4
-- https://www.phpmyadmin.net/
--
-- Host: localhost:3306
-- Generation Time: Jan 19, 2017 at 02:52 PM
-- Server version: 10.1.14-MariaDB
-- PHP Version: 7.0.10
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `sure`
--
-- --------------------------------------------------------
--
-- Table structure for table `caveats`
--
CREATE TABLE `caveats` (
`id` int(10) UNSIGNED NOT NULL,
`Caveat_Date` date NOT NULL,
`Caveat_Ref` varchar(20) COLLATE utf8_unicode_ci NOT NULL DEFAULT '',
`Description` varchar(50) COLLATE utf8_unicode_ci NOT NULL,
`Enquiry_Details` varchar(50) COLLATE utf8_unicode_ci NOT NULL,
`LR_No` varchar(50) COLLATE utf8_unicode_ci NOT NULL DEFAULT '',
`LRNo_Block` varchar(50) COLLATE utf8_unicode_ci NOT NULL DEFAULT '',
`IR_IC_Nos` varchar(50) COLLATE utf8_unicode_ci NOT NULL,
`Size` varchar(50) COLLATE utf8_unicode_ci NOT NULL COMMENT 'In Hectares',
`Town` varchar(50) COLLATE utf8_unicode_ci NOT NULL,
`Document_Uploads` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`Publish_date` date DEFAULT NULL,
`Publish_status` tinyint(4) NOT NULL DEFAULT '0',
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
--
-- Dumping data for table `caveats`
--
INSERT INTO `caveats` (`id`, `Caveat_Date`, `Caveat_Ref`, `Description`, `Enquiry_Details`, `LR_No`, `LRNo_Block`, `IR_IC_Nos`, `Size`, `Town`, `Document_Uploads`, `Publish_date`, `Publish_status`, `created_at`, `updated_at`) VALUES
(1, '2017-01-10', 'Caveat Ref', 'Desc.', 'Enq. Details', 'LRNO.', 'LRNOBlock', 'IRICNos', '123', 'Nairobi', 'KibakiCharlesWatsonNdethiCV2016 (1).pdf', NULL, 0, '2017-01-18 04:20:58', '2017-01-18 04:20:58'),
(2, '2017-01-10', 'Caveat Ref', 'Desc.', 'Enq', 'LRN', 'NEN', 'IRIC', '123', 'Nairobi', 'KibakiCharlesWatsonNdethiCV2016 (1).pdf', NULL, 0, '2017-01-18 08:57:43', '2017-01-18 08:57:43'),
(3, '2017-01-10', 'Caveat Ref', 'Description', 'Enq Details', 'LR N', 'LRNB', 'IRICN', '123', 'Nairobi', 'KibakiCharlesWatsonNdethiCV2016 (1).pdf', NULL, 0, '2017-01-18 09:32:35', '2017-01-18 09:32:35'),
(4, '2017-01-10', 'Caveat Ref', 'Description', 'Enq Details', 'LR N', 'LRNB', 'IRICN', '123', 'Nairobi', 'KibakiCharlesWatsonNdethiCV2016 (1).pdf', NULL, 0, '2017-01-18 09:35:16', '2017-01-18 09:35:16'),
(5, '2017-01-10', 'Caveat Ref', 'Desc', 'Enq Details', 'LRNO', 'LRNoBlock', 'iRCICN', '12', 'Nairobi', 'KibakiCharlesWatsonNdethiCV2016 (1).pdf', NULL, 0, '2017-01-18 09:39:21', '2017-01-18 09:39:21'),
(6, '2017-01-11', 'Caveat Ref', 'Description', 'Enq.', 'LR No', 'LRNB', 'IRIC', '123', 'Nairobi', 'Balsamiq Mockups 3.exe', NULL, 0, '2017-01-19 03:33:09', '2017-01-19 03:33:09'),
(7, '2017-01-19', 'Caveat Ref', 'Description', 'Enq. Details', 'LRNo', 'LRNo Block', 'IRICNos', '12', 'Nairobi', 'KEVIN MWANGI NDITIKA.docx', NULL, 0, '2017-01-19 05:45:38', '2017-01-19 05:45:38'),
(8, '2017-01-10', 'Caveat Ref', 'Desc', 'Enq.', 'LRNo', 'LRNB', 'IRIC', '123', 'Nairobi', 'KEVIN MWANGI NDITIKA.docx', NULL, 0, '2017-01-19 10:03:58', '2017-01-19 10:03:58');
-- --------------------------------------------------------
--
-- Table structure for table `migrations`
--
CREATE TABLE `migrations` (
`id` int(10) UNSIGNED NOT NULL,
`migration` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`batch` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
--
-- Dumping data for table `migrations`
--
INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES
(37, '2014_10_12_000000_create_users_table', 1);
-- --------------------------------------------------------
--
-- Table structure for table `password_resets`
--
CREATE TABLE `password_resets` (
`email` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`token` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`created_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-- --------------------------------------------------------
--
-- Table structure for table `plans`
--
--
-- Subscription plans and their pricing tiers.
-- NOTE(review): Annual_Rate / Monthly_Rate use double(15,8) — floating
-- point for money; DECIMAL(15,2) would avoid binary rounding drift.
-- The dump data (30000.0, 3000.0, ...) fits DECIMAL exactly.
--
CREATE TABLE `plans` (
`id` int(10) UNSIGNED NOT NULL,
`Plan_Name` varchar(20) COLLATE utf8_unicode_ci NOT NULL,
`Alias` varchar(20) COLLATE utf8_unicode_ci NOT NULL,
`Annual_Rate` double(15,8) NOT NULL,
`Monthly_Rate` double(15,8) NOT NULL,
`Notices` int(11) NOT NULL,
`trial_period` int(11) NOT NULL COMMENT 'In Days',
`Plan_Details` varchar(50) COLLATE utf8_unicode_ci NOT NULL,
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updated_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
--
-- Dumping data for table `plans`
--
INSERT INTO `plans` (`id`, `Plan_Name`, `Alias`, `Annual_Rate`, `Monthly_Rate`, `Notices`, `trial_period`, `Plan_Details`, `created_at`, `updated_at`) VALUES
(1, 'Jaribu Plan', 'jaribu', 30000.00000000, 3000.00000000, 5, 30, 'Entry Level', NULL, NULL),
(2, 'Hakika Plan', 'hakika', 50000.00000000, 5000.00000000, 10, 30, 'Intermediate', NULL, NULL),
(3, 'Wezesha Plan', 'wezesha', 250000.00000000, 30000.00000000, 75, 30, 'Pro', NULL, NULL);
-- --------------------------------------------------------
--
-- Table structure for table `social_providers`
--
CREATE TABLE `social_providers` (
`id` int(10) UNSIGNED NOT NULL,
`name` varchar(20) COLLATE utf8_unicode_ci NOT NULL,
`abbrev` varchar(20) COLLATE utf8_unicode_ci NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-- --------------------------------------------------------
--
-- Table structure for table `subscriptions`
--
CREATE TABLE `subscriptions` (
`id` int(10) UNSIGNED NOT NULL,
`plan_id` int(11) NOT NULL,
`user_id` int(11) NOT NULL,
`payment_id` int(11) DEFAULT NULL,
`caveats_balance` int(11) NOT NULL,
`approved` tinyint(4) NOT NULL DEFAULT '0',
`trial_notices` int(11) NOT NULL DEFAULT '3',
`trial_start_date` date DEFAULT NULL,
`trial_end_date` date DEFAULT NULL,
`start_date` date DEFAULT NULL,
`end_date` date DEFAULT NULL,
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
--
-- Dumping data for table `subscriptions`
--
INSERT INTO `subscriptions` (`id`, `plan_id`, `user_id`, `payment_id`, `caveats_balance`, `approved`, `trial_notices`, `trial_start_date`, `trial_end_date`, `start_date`, `end_date`, `created_at`, `updated_at`) VALUES
(1, 2, 1, NULL, 10, 0, 0, NULL, NULL, NULL, NULL, '2017-01-18 05:36:35', '2017-01-18 05:36:35'),
(2, 1, 2, NULL, 5, 0, 0, NULL, NULL, NULL, NULL, '2017-01-18 09:02:30', '2017-01-18 09:02:30'),
(3, 1, 2, NULL, 5, 0, 0, NULL, NULL, NULL, NULL, '2017-01-18 09:16:37', '2017-01-18 09:16:37'),
(4, 2, 2, NULL, 10, 0, 0, NULL, NULL, NULL, NULL, '2017-01-18 09:18:03', '2017-01-18 09:18:03'),
(5, 1, 3, NULL, 5, 0, 3, '2017-09-08', '1970-01-01', NULL, NULL, '2017-01-19 05:47:37', '2017-01-19 05:47:37'),
(6, 1, 3, NULL, 5, 0, 3, '2017-01-19', '2017-01-19', NULL, NULL, '2017-01-19 10:14:38', '2017-01-19 10:14:38'),
(7, 1, 3, NULL, 5, 0, 3, '2017-01-19', '2017-02-18', NULL, NULL, '2017-01-19 10:23:00', '2017-01-19 10:23:00'),
(8, 1, 3, NULL, 5, 0, 3, '2017-01-19', '2017-02-18', NULL, NULL, '2017-01-19 10:28:07', '2017-01-19 10:28:07'),
(9, 1, 3, NULL, 5, 0, 3, '2017-01-19', '2017-02-18', NULL, NULL, '2017-01-19 10:43:40', '2017-01-19 10:43:40');
-- --------------------------------------------------------
--
-- Table structure for table `users`
--
CREATE TABLE `users` (
`id` int(10) UNSIGNED NOT NULL,
`name` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`email` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`role_id` int(11) NOT NULL DEFAULT '1',
`user_type_id` int(11) NOT NULL DEFAULT '1',
`password` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`confirmation_token` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL,
`confirmed` tinyint(1) NOT NULL DEFAULT '0',
`remember_token` varchar(100) COLLATE utf8_unicode_ci DEFAULT NULL,
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
--
-- Dumping data for table `users`
--
INSERT INTO `users` (`id`, `name`, `email`, `role_id`, `user_type_id`, `password`, `confirmation_token`, `confirmed`, `remember_token`, `created_at`, `updated_at`) VALUES
(1, 'Watson', '<EMAIL>', 1, 1, '$2y$10$KP1NAv5svgoWaq.hbjQXGeJG/A3cYO9T5E0Pfd79cPO1LnhgOVeP.', NULL, 1, NULL, '2017-01-18 04:20:59', '2017-01-18 04:21:33'),
(2, 'Watson', '<EMAIL>', 1, 1, '$2y$10$5bLuWyqgOYHQD8aonESUiOMv35wnZGNLo3WL2nUe4HBW2Nix0G.RK', NULL, 1, NULL, '2017-01-18 08:57:45', '2017-01-18 08:59:57'),
(3, 'Watson', '<EMAIL>', 1, 1, '$2y$10$0dVwdjYIMSWu8rdf3rrb4OI6z4mnYenPfg5KBNs4MfPnu8V1bBwWm', NULL, 1, NULL, '2017-01-19 05:45:38', '2017-01-19 05:46:49');
-- --------------------------------------------------------
--
-- Table structure for table `user_caveats`
--
CREATE TABLE `user_caveats` (
`id` int(10) UNSIGNED NOT NULL,
`email` varchar(30) COLLATE utf8_unicode_ci NOT NULL,
`caveat_id` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
--
-- Dumping data for table `user_caveats`
--
INSERT INTO `user_caveats` (`id`, `email`, `caveat_id`) VALUES
(1, '<EMAIL>', 1),
(2, '<EMAIL>', 2),
(3, '<EMAIL>', 5),
(4, '<EMAIL>', 6),
(5, '<EMAIL>', 7),
(6, '<EMAIL>', 8);
-- --------------------------------------------------------
--
-- Table structure for table `user_roles`
--
CREATE TABLE `user_roles` (
`id` int(10) UNSIGNED NOT NULL,
`role` varchar(20) COLLATE utf8_unicode_ci NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-- --------------------------------------------------------
--
-- Table structure for table `user_social`
--
CREATE TABLE `user_social` (
`id` int(10) UNSIGNED NOT NULL,
`user_id` int(11) NOT NULL,
`email` varchar(20) COLLATE utf8_unicode_ci NOT NULL,
`provider` varchar(20) COLLATE utf8_unicode_ci NOT NULL,
`provider_id` varchar(20) COLLATE utf8_unicode_ci NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
-- --------------------------------------------------------
--
-- Table structure for table `user_types`
--
CREATE TABLE `user_types` (
`id` int(10) UNSIGNED NOT NULL,
`type` varchar(20) COLLATE utf8_unicode_ci NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
--
-- Indexes for dumped tables
--
--
-- Indexes for table `caveats`
--
ALTER TABLE `caveats`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `migrations`
--
ALTER TABLE `migrations`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `password_resets`
--
ALTER TABLE `password_resets`
ADD KEY `password_resets_email_index` (`email`),
ADD KEY `password_resets_token_index` (`token`);
--
-- Indexes for table `plans`
--
ALTER TABLE `plans`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `social_providers`
--
ALTER TABLE `social_providers`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `subscriptions`
--
ALTER TABLE `subscriptions`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `users`
--
ALTER TABLE `users`
ADD PRIMARY KEY (`id`),
ADD UNIQUE KEY `users_email_unique` (`email`);
--
-- Indexes for table `user_caveats`
--
ALTER TABLE `user_caveats`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `user_roles`
--
ALTER TABLE `user_roles`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `user_social`
--
ALTER TABLE `user_social`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `user_types`
--
ALTER TABLE `user_types`
ADD PRIMARY KEY (`id`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `caveats`
--
ALTER TABLE `caveats`
MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=9;
--
-- AUTO_INCREMENT for table `migrations`
--
ALTER TABLE `migrations`
MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=38;
--
-- AUTO_INCREMENT for table `plans`
--
ALTER TABLE `plans`
MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=4;
--
-- AUTO_INCREMENT for table `social_providers`
--
ALTER TABLE `social_providers`
MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `subscriptions`
--
ALTER TABLE `subscriptions`
MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=10;
--
-- AUTO_INCREMENT for table `users`
--
ALTER TABLE `users`
MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=4;
--
-- AUTO_INCREMENT for table `user_caveats`
--
ALTER TABLE `user_caveats`
MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=7;
--
-- AUTO_INCREMENT for table `user_roles`
--
ALTER TABLE `user_roles`
MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `user_social`
--
ALTER TABLE `user_social`
MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `user_types`
--
ALTER TABLE `user_types`
MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
|
<reponame>wahyu-ramadhan69/sistem_informasi_perpustakaan
/*
SQLyog Ultimate v12.5.1 (64 bit)
MySQL - 10.4.14-MariaDB : Database - projek_perpus
*********************************************************************
*/
/*!40101 SET NAMES utf8 */;
/*!40101 SET SQL_MODE=''*/;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
CREATE DATABASE /*!32312 IF NOT EXISTS*/`projek_perpus` /*!40100 DEFAULT CHARACTER SET utf8mb4 */;
USE `projek_perpus`;
/*Table structure for table `tbl_biaya_denda` */
DROP TABLE IF EXISTS `tbl_biaya_denda`;
/* tbl_biaya_denda: fine-per-day configuration; `stat` marks which row
   is active ('Aktif' / 'Tidak Aktif' in the dump data).
   NOTE(review): harga_denda (a money amount) and tgl_tetap (a date) are
   both stored as varchar(255) — DECIMAL and DATE types would allow
   arithmetic and range queries without casting. */
CREATE TABLE `tbl_biaya_denda` (
`id_biaya_denda` int(11) NOT NULL AUTO_INCREMENT,
`harga_denda` varchar(255) NOT NULL,
`stat` varchar(255) NOT NULL,
`tgl_tetap` varchar(255) NOT NULL,
PRIMARY KEY (`id_biaya_denda`)
) ENGINE=InnoDB AUTO_INCREMENT=12 DEFAULT CHARSET=utf8mb4;
/*Data for the table `tbl_biaya_denda` */
insert into `tbl_biaya_denda`(`id_biaya_denda`,`harga_denda`,`stat`,`tgl_tetap`) values
(1,'4000','Aktif','2021-06-01'),
(11,'5600','Tidak Aktif','2021-06-01');
/*Table structure for table `tbl_buku` */
DROP TABLE IF EXISTS `tbl_buku`;
CREATE TABLE `tbl_buku` (
`id_buku` int(11) NOT NULL AUTO_INCREMENT,
`buku_id` varchar(255) NOT NULL,
`id_kategori` int(11) NOT NULL,
`id_rak` int(11) NOT NULL,
`isbn` varchar(255) DEFAULT NULL,
`title` varchar(255) DEFAULT NULL,
`penerbit` varchar(255) DEFAULT NULL,
`pengarang` varchar(255) DEFAULT NULL,
`thn_buku` varchar(255) DEFAULT NULL,
`isi` text DEFAULT NULL,
`jml` int(11) DEFAULT NULL,
`tgl_masuk` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id_buku`)
) ENGINE=InnoDB AUTO_INCREMENT=12 DEFAULT CHARSET=utf8mb4;
/*Data for the table `tbl_buku` */
insert into `tbl_buku`(`id_buku`,`buku_id`,`id_kategori`,`id_rak`,`isbn`,`title`,`penerbit`,`pengarang`,`thn_buku`,`isi`,`jml`,`tgl_masuk`) values
(8,'BK008',2,1,'132-123-234-231','CARA MUDAH BELAJAR PEMROGRAMAN C++','INFORMATIKA BANDUNG','BUDI RAHARJO ','2012','kontlloo',23,'2021-06-07 16:56:44'),
(11,'BK009',3,1,'3475893479','cara buat anak part 2','jepun .com','sugio','2020','bangkee',12,'2021-06-07 16:56:27');
/*Table structure for table `tbl_denda` */
DROP TABLE IF EXISTS `tbl_denda`;
CREATE TABLE `tbl_denda` (
`id_denda` int(11) NOT NULL AUTO_INCREMENT,
`pinjam_id` varchar(255) NOT NULL,
`denda` varchar(255) NOT NULL,
`lama_waktu` int(11) NOT NULL,
`tgl_denda` varchar(255) NOT NULL,
PRIMARY KEY (`id_denda`)
) ENGINE=InnoDB AUTO_INCREMENT=9 DEFAULT CHARSET=utf8mb4;
/*Data for the table `tbl_denda` */
insert into `tbl_denda`(`id_denda`,`pinjam_id`,`denda`,`lama_waktu`,`tgl_denda`) values
(5,'PJ009','0',0,'2020-05-20'),
(6,'PJ0011','0',0,'2021-06-01'),
(7,'PJ0012','0',0,'2021-06-01'),
(8,'PJ001','0',0,'2021-06-01');
/*Table structure for table `tbl_kategori` */
DROP TABLE IF EXISTS `tbl_kategori`;
CREATE TABLE `tbl_kategori` (
`id_kategori` int(11) NOT NULL AUTO_INCREMENT,
`nama_kategori` varchar(255) NOT NULL,
PRIMARY KEY (`id_kategori`)
) ENGINE=InnoDB AUTO_INCREMENT=6 DEFAULT CHARSET=utf8mb4;
/*Data for the table `tbl_kategori` */
insert into `tbl_kategori`(`id_kategori`,`nama_kategori`) values
(2,'Pemrograman'),
(3,'biologi');
/*Table structure for table `tbl_login` */
DROP TABLE IF EXISTS `tbl_login`;
/* tbl_login: combined account + member-profile table; `level` holds the
   role ('Anggota' / 'Petugas' in the dump data).
   NOTE(review): the `pass` values in this dump are 32-character hex
   strings — they look like unsalted MD5 hashes (confirm); migrating to
   bcrypt/argon2 is strongly advised.
   NOTE(review): tgl_lahir / tgl_bergabung store dates as varchar —
   DATE would be safer. */
CREATE TABLE `tbl_login` (
`id_login` int(11) NOT NULL AUTO_INCREMENT,
`anggota_id` varchar(255) NOT NULL,
`user` varchar(255) NOT NULL,
`pass` varchar(255) NOT NULL,
`level` varchar(255) NOT NULL,
`nama` varchar(255) NOT NULL,
`tempat_lahir` varchar(255) NOT NULL,
`tgl_lahir` varchar(255) NOT NULL,
`jenkel` varchar(255) NOT NULL,
`alamat` text NOT NULL,
`telepon` varchar(25) NOT NULL,
`email` varchar(255) NOT NULL,
`tgl_bergabung` varchar(255) NOT NULL,
`foto` varchar(255) NOT NULL,
PRIMARY KEY (`id_login`)
) ENGINE=InnoDB AUTO_INCREMENT=14 DEFAULT CHARSET=utf8mb4;
/*Data for the table `tbl_login` */
insert into `tbl_login`(`id_login`,`anggota_id`,`user`,`pass`,`level`,`nama`,`tempat_lahir`,`tgl_lahir`,`jenkel`,`alamat`,`telepon`,`email`,`tgl_bergabung`,`foto`) values
(4,'AG003','irvan','202cb962ac59075b964b07152d234b70','Anggota','irvant','bengkuluuu','2000-06-01','Laki-Laki','bengkuluuuu','0823784728364','<EMAIL>','2021-06-01','user_1622552708.png'),
(5,'AG005','charli','202cb962ac59075b964b07152d234b70','Petugas','charli','bengkullu','2000-11-02','Laki-Laki','kemumu','089327837234','<EMAIL>','2021-06-01','user_1622552851.jpg'),
(7,'AG006','jauh','202cb962ac59075b964b07152d234b70','Petugas','joni','bengkulu','2021-06-07','Laki-Laki','euwhfyewgf','939485934','<EMAIL>','2021-06-07','user_1623058689.jpeg'),
(8,'AG008','2u3iy4r2u3','e5f300ff9997e150e793a954e7efb226','Petugas','fgyewgu','hfwgeyfgyweg','2021-06-08','Laki-Laki','fywegyfwe','04569084569','<EMAIL>','2021-06-07','user_1623058788'),
(9,'AG009','jasdja','ad7544e47c9f480453c817e1f5d5c90b','Petugas','rono','behu','2021-06-09','Laki-Laki','hfuweuifwe','903593809453','<EMAIL>','2021-06-07','user_1623062167'),
(12,'AG0010','charli123','202cb962ac59075b964b07152d234b70','Petugas','charli rahmat','Bengkulu','2021-06-10','Laki-Laki','jauhhh','09345893453','<EMAIL>','2021-06-11','user_1623380369'),
(13,'AG0013','wahyu123','54b7a3221f83ca9d17f3fb072738904f','Petugas','wahyu','bengkulu','2021-06-07','Laki-Laki','jauhhh','086745234234','<EMAIL>','2021-06-11','user_1623381170');
/*Table structure for table `tbl_pinjam` */
DROP TABLE IF EXISTS `tbl_pinjam`;
/* tbl_pinjam: loan records linking a member (anggota_id) to a book
   (buku_id) via business-key strings, with no FK constraints declared.
   NOTE(review): all three dates (tgl_pinjam / tgl_balik / tgl_kembali)
   are stored as varchar, and the dump uses the string '0' in
   tgl_kembali as a "not yet returned" sentinel — DATE NULL would make
   overdue/range queries reliable. */
CREATE TABLE `tbl_pinjam` (
`id_pinjam` int(11) NOT NULL AUTO_INCREMENT,
`pinjam_id` varchar(255) NOT NULL,
`anggota_id` varchar(255) NOT NULL,
`buku_id` varchar(255) NOT NULL,
`status` varchar(255) NOT NULL,
`tgl_pinjam` varchar(255) NOT NULL,
`lama_pinjam` int(11) NOT NULL,
`tgl_balik` varchar(255) NOT NULL,
`tgl_kembali` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id_pinjam`)
) ENGINE=InnoDB AUTO_INCREMENT=17 DEFAULT CHARSET=utf8mb4;
/*Data for the table `tbl_pinjam` */
insert into `tbl_pinjam`(`id_pinjam`,`pinjam_id`,`anggota_id`,`buku_id`,`status`,`tgl_pinjam`,`lama_pinjam`,`tgl_balik`,`tgl_kembali`) values
(14,'PJ001','AG005','BK008','Di Kembalikan','2021-05-01',2,'2021-06-03','2021-06-01'),
(15,'PJ0015','AG005','BK008','Dipinjam','2021-05-01',2,'2021-05-03','0'),
(16,'PJ0016','AG005','BK008','Dipinjam','2021-06-01',2,'2021-05-29','0');
/*Table structure for table `tbl_rak` */
DROP TABLE IF EXISTS `tbl_rak`;
CREATE TABLE `tbl_rak` (
`id_rak` int(11) NOT NULL AUTO_INCREMENT,
`nama_rak` varchar(255) NOT NULL,
PRIMARY KEY (`id_rak`)
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4;
/*Data for the table `tbl_rak` */
insert into `tbl_rak`(`id_rak`,`nama_rak`) values
(1,'Rak Buku 1');
/*Table structure for table `user_token` */
-- Tokens associated with an e-mail address (table is empty in this dump).
-- NOTE(review): tanggal_dibuat ("created date") is stored as int --
-- presumably a Unix timestamp; verify against the application code.
DROP TABLE IF EXISTS `user_token`;
CREATE TABLE `user_token` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`email` varchar(128) DEFAULT NULL,
`token` varchar(128) DEFAULT NULL,
`tanggal_dibuat` int(11) DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=14 DEFAULT CHARSET=utf8mb4;
/*Data for the table `user_token` */
-- Dump footer: restore the session variables saved by the dump header.
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-- | (file-concatenation separator; commented out so the script parses)
-- <gh_stars>0 (scraper metadata, not SQL; commented out so the script parses)
-- Full rebuild of the classifieds database: drop any existing copy,
-- recreate it, and make it the default schema for the statements below.
-- NOTE: destructive -- every table and row in todoanunciosdb is lost.
DROP DATABASE IF EXISTS todoanunciosdb;
CREATE DATABASE IF NOT EXISTS todoanunciosdb;
USE todoanunciosdb;
-- Account table: the natural key is the user's e-mail address.
CREATE TABLE usuarios (
    mail     VARCHAR(50),   -- login identifier (primary key)
    pass     VARCHAR(80),   -- password value as supplied by the app
    nombre   VARCHAR(50),   -- first name
    apellido VARCHAR(60),   -- last name
    rol      VARCHAR(5),    -- role; seed data uses 'user' and 'admin'
    imagen   VARCHAR(255),  -- avatar file name
    PRIMARY KEY (mail)
) ENGINE=InnoDb;

-- Seed accounts: one regular user and one administrator.
INSERT INTO usuarios (mail, pass, nombre, apellido, rol, imagen)
VALUES ('<EMAIL>', '<PASSWORD>', 'userName', 'userApellido', 'user', 'user.jpg');
INSERT INTO usuarios (mail, pass, nombre, apellido, rol, imagen)
VALUES ('<EMAIL>', '<PASSWORD>', 'adminName', 'adminApellido', 'admin', 'user3.jpg');
-- Classified-ad listings: one row per advert posted by a user.
CREATE TABLE anuncios (
    id        INT AUTO_INCREMENT NOT NULL,   -- int(255) display width dropped; it has no effect
    titulo    VARCHAR(255),                  -- advert headline
    cuerpo    VARCHAR(255),                  -- advert body text
    precio    DECIMAL(6,2),                  -- money must be exact: DECIMAL, not FLOAT (max 9999.99)
    categoria VARCHAR(80),
    autor     VARCHAR(255),                  -- poster's e-mail (FK to usuarios.mail)
    imagen    VARCHAR(255),                  -- photo file name
    CONSTRAINT pk_anuncios PRIMARY KEY (id),
    CONSTRAINT fk_anuncios FOREIGN KEY (autor) REFERENCES usuarios (mail) ON DELETE CASCADE
) ENGINE=InnoDb;
-- Seed adverts. String literals use single quotes only, so the script
-- still parses when the ANSI_QUOTES SQL mode treats "..." as identifiers.
INSERT INTO anuncios ( titulo, cuerpo, precio, categoria, autor, imagen) VALUES ( 'Se vende Coche', 'Seat Ibiza Promocion 25 aniversario con 110CV de motor.', 6500, 'Equipamiento y Maquinaria', '<EMAIL>', 'coche.jpg');
INSERT INTO anuncios ( titulo, cuerpo, precio, categoria, autor, imagen) VALUES ( 'Vendo bicicleta "vintash"', 'Vendo la bicicleta vieja de mi padre por falta de uso', 80, 'Deportes', '<EMAIL>', 'bici.jpg');
INSERT INTO anuncios ( titulo, cuerpo, precio, categoria, autor, imagen) VALUES ( 'Vendo pc Acer', '500gb de HDD y 8gb de ram',400, 'Consolas y Videojuegos', '<EMAIL>', 'pc.jpg');
INSERT INTO anuncios ( titulo, cuerpo, precio, categoria, autor, imagen) VALUES ( 'Se vende Ford', 'Ford Corrida 1. 8 TD, con 90 caballos. El coche es familia de 1998. Perfecto de mecanica. ITV, correas de distribucion, ruedas, pastillas de freno, filtros y aceite recien cambiados, todo demostrable.', 9696, 'Equipamiento y Maquinaria', '<EMAIL>', 'ford.jpeg');
INSERT INTO anuncios ( titulo, cuerpo, precio, categoria, autor, imagen) VALUES ( 'Vendo pistola de Rick y Morty', 'Autentica pistola de coleccionista de la serie Rick y Morty', 120, 'Coleccionismo', '<EMAIL>', 'pistola.jpeg');
INSERT INTO anuncios ( titulo, cuerpo, precio, categoria, autor, imagen) VALUES ( 'Se vende la comoda de mi abuela', 'La vendo porque mi abuela decia que no le gustaba una comoda tan actual',200, 'Casa, Jardin y Bricolaje', '<EMAIL>', 'comoda.jpeg');
INSERT INTO anuncios ( titulo, cuerpo, precio, categoria, autor, imagen) VALUES ( 'Regalo Vans nuevas', 'Es broma, no las regalo, las vendo porque no me gustan. Nunca me las he puesto asi que estan nuevas. Son se la edicion de los vengadores. Talla 43',19.50, 'Ropa, Zapatos y Complementos', '<EMAIL>', 'vans.jpeg');
INSERT INTO anuncios ( titulo, cuerpo, precio, categoria, autor, imagen) VALUES ( 'Nintendo 64 Mini', 'Vendo la nintendo 64 mini, con el super mario 64 resmasterizado y dos mandos',119.50, 'Consolas y Videojuegos', '<EMAIL>', 'nintendo.jpeg');
-- Comments left by users on adverts.
CREATE TABLE comentarios (
    id      INT AUTO_INCREMENT NOT NULL,  -- int(255) display width dropped; it has no effect
    anuncio INT,                          -- advert being commented on (FK to anuncios.id)
    cuerpo  VARCHAR(254),                 -- comment text
    autor   VARCHAR(50),                  -- commenter's e-mail (FK to usuarios.mail)
    -- Named constraints, matching the pk_/fk_ convention already used for
    -- anuncios, so constraint errors and migrations are greppable.
    CONSTRAINT pk_comentarios PRIMARY KEY (id),
    CONSTRAINT fk_comentarios_anuncio FOREIGN KEY (anuncio) REFERENCES anuncios (id) ON DELETE CASCADE,
    CONSTRAINT fk_comentarios_autor FOREIGN KEY (autor) REFERENCES usuarios (mail) ON DELETE CASCADE
) ENGINE=InnoDb;
-- Seed comments on adverts 1 and 2.
INSERT INTO comentarios ( anuncio, cuerpo, autor) VALUES ( 1, 'Hola, me gustaria saber cuantos kilometros tiene el coche', '<EMAIL>');
INSERT INTO comentarios ( anuncio, cuerpo, autor) VALUES ( 2, 'Me interesa tu bicicleta pero no puedo gastarme mas de 60 euros', '<EMAIL>');
-- | (file-concatenation separator; commented out so the script parses)
USE [VipunenTK_DW]
GO
/****** Object:  StoredProcedure [dbo].[p_lataa_f_tutkinnon_suorittaneiden_aiempi_koulutus_4_7]    Script Date: 3.12.2021 14:36:18 ******/
-- T-SQL preamble for the procedure definition that follows. These SET
-- options are applied per batch (each GO ends a batch); SQL Server
-- records them with the procedure at creation time.
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
ALTER PROCEDURE [dbo].[p_lataa_f_tutkinnon_suorittaneiden_aiempi_koulutus_4_7] AS
--Päivittää VipunenTK-kantaan tähtimallin
--Heti sen jälkeen täyttää tabular-mallisen taulun
--Execute VipunenTK_DW.dbo.p_lataa_f_tutkinnon_suorittaneiden_aiempi_koulutus_4_7
--Drop table VipunenTK.dbo.f_tutkinnon_suorittaneiden_aiempi_koulutus_4_7
/*
Truncate table [VipunenTK_DW].[dbo].[sa_4_7_Tutkinnon_suorittaneiden_aiempi_koulutus]
Insert into [VipunenTK_DW].[dbo].[sa_4_7_Tutkinnon_suorittaneiden_aiempi_koulutus]
Select * FROM VipunenTK_SA.[dbo].[v_sa_4_7_Tutkinnon_suorittaneiden_aiempi_koulutus]
Truncate table VipunenTK_DW.dbo.f_4_7_Tutkinnon_suorittaneiden_aiempi_koulutus
Insert into VipunenTK_DW.dbo.f_4_7_Tutkinnon_suorittaneiden_aiempi_koulutus
Select * from VipunenTK_DW.dbo.v_f_4_7_Tutkinnon_suorittaneiden_aiempi_koulutus
Truncate table VipunenTK_DW.dbo.f_tutkinnon_suorittaneiden_aiempi_koulutus_4_7
Insert into VipunenTK_DW.dbo.f_tutkinnon_suorittaneiden_aiempi_koulutus_4_7
Select * from VipunenTK_DW.dbo.v_f_tutkinnon_suorittaneiden_aiempi_koulutus_4_7
*/
Truncate table VipunenTK.dbo.f_tutkinnon_suorittaneiden_aiempi_koulutus_4_7
INSERT INTO VipunenTK.dbo.f_tutkinnon_suorittaneiden_aiempi_koulutus_4_7
SELECT --top 0
[tilastovuosi]
,[tilv_date]
,f.tarkastelujakso_koodi
,[tarkastelujakso_id] = ISNULL(d_tj.id, -2)
,f.[sukupuoli_koodi]
,[sukupuoli_id] = ISNULL(d_sp.id, -1)
,f.[aidinkieli_versio1_koodi]
,[aidinkieli_id] = ISNULL(d_ak1.id, -1)
,[suorv]
,[suorlk]
,[suorituskausikoodi]
,[suorituskausi_id] = ISNULL(d_suo.kausi_id, -1)
,f.[koulutusluokitus_avain]
,[koulutusluokitus_id] = ISNULL(d_kl.id, -1)
,[alvv]
,[allk]
,[aloituskausikoodi]
,[aloituskausi_id] = ISNULL(d_alo.kausi_id, -1)
,f.[oppisopimuskoulutus_koodi]
,[oppisopimuskoulutus_id] = ISNULL(d_osk.id, -1)
,[sopimusjaksot_id] = isnull(d_amm_sj.id, -1)
,f.[ammatillisen_koulutuksen_koulutuslaji_koodi]
,[ammatillisen_koulutuksen_koulutuslaji_id] = ISNULL(d_akk.id, -1)
,f.[opetussuunnitelmaperusteinen_koulutus_nayttotutkintoon_valmistava_koulutus_koodi]
,[opetussuunnitelmaperusteinen_koulutus_nayttotutkintoon_valmistava_koulutus_id] = ISNULL(d_on.id, -1)
,f.[nuorten_aikuisten_koulutus_amm_koodi]
,[nuorten_aikuisten_koulutus_amm_id] = ISNULL(d_na.id, -1)
,[syntv]
,f.[ika_avain]
,[ika_id] = ISNULL(d_ika.id, -1)
,[henkiloiden_lkm]
--,[lulkm]
,[lu_henkiloiden_lkm] = Case when lulkm > 0 then henkiloiden_lkm else 0 end
,[lukoulk]
,[lu_aiempi_koulutusluokitus_id] = isnull(d_lu_kl.id, -1)
,[lusuorv]
,[luaikoul]
,[lu_nuorten_aikuisten_koulutus_amm_id] = ISNULL(d_lu_na.id, -1)
,[lu_ika]
,lu_ika_id = isnull(d_lu_ika.id, -1)
,[lu_aiempi_tutkinto_suoritettu]
,[lu_aiempi_tutkinto_suoritettu_id] = isnull(d_lu_ts.id, -2)
--,[ammlkm]
,[ammlkm_id] = isnull(d_amm_tm.id, -2)
,[amm_henkiloiden_lkm] = Case when ammlkm > 0 then henkiloiden_lkm else 0 end
,[ammkoulk]
,[amm_aiempi_koulutusluokitus_id] = isnull(d_amm_kl.id, -1)
,[ammsuorv]
,[ammoppis]
,[ammoppis_id] = isnull(d_amm_osk.id, -1)
,[amm_sopimusjaksot_id] = isnull(d_amm_sj2.id, -1)
,[ammtutklaja]
,[ammtutklaja_id] = isnull(d_amm_akk.id, -1)
,[ammtutktav]
,[ammtutktav_id] = isnull(d_amm_on.id, -1)
,[ammatillinen_koulutus_ryhma]
,[ammatillinen_koulutus_ryhma_id] = isnull(d_ammkl.id, -1)
,[amm_ika]
,amm_ika_id = isnull(d_amm_ika.id, -1)
,[amm_aiempi_tutkinto_suoritettu]
,[amm_aiempi_tutkinto_suoritettu_id] = isnull(d_amm_ts.id, -2)
--,[opistlkm]
,[opist_henkiloiden_lkm] = Case when opistlkm > 0 then henkiloiden_lkm else 0 end
,[opistkoulk]
,[opist_aiempi_koulutusluokitus_id] = isnull(d_opist_kl.id, -1)
,[opistsuorv]
,[opist_ika]
,opist_ika_id = isnull(d_opist_ika.id, -1)
,[opist_aiempi_tutkinto_suoritettu]
,[opist_aiempi_tutkinto_suoritettu_id] = isnull(d_opist_ts.id, -2)
--,[ammkalkm]
,[ammka_henkiloiden_lkm] = Case when ammkalkm > 0 then henkiloiden_lkm else 0 end
,[ammkakoulk]
,[ammka_aiempi_koulutusluokitus_id] = isnull(d_ammka_kl.id, -1)
,[ammkasuorv]
,[ammka_ika]
,ammka_ika_id = isnull(d_ammka_ika.id, -1)
,[ammka_aiempi_tutkinto_suoritettu]
,[ammka_aiempi_tutkinto_suoritettu_id] = isnull(d_ammka_ts.id, -2)
--,[amklkm]
,[amklkm_id] = isnull(d_amk_tm.id, -2)
,[amk_henkiloiden_lkm] = Case when amklkm > 0 then henkiloiden_lkm else 0 end
,[amkkoulk]
,[amk_aiempi_koulutusluokitus_id] = isnull(d_amk_kl.id, -1)
,[amksuorv]
,[amkaikoul]
,[amkaikoul_id] = isnull(d_amk_na.id, -1)
,[amk_ika]
,amk_ika_id = isnull(d_amk_ika.id, -1)
,[amk_aiempi_tutkinto_suoritettu]
,[amk_aiempi_tutkinto_suoritettu_id] = isnull(d_amk_ts.id, -2)
--,[akklkm]
,[akklkm_id] = isnull(d_akk_tm.id, -2)
,[akk_henkiloiden_lkm] = Case when akklkm > 0 then henkiloiden_lkm else 0 end
,[akkkoulk]
,[akk_aiempi_koulutusluokitus_id] = isnull(d_akk_kl.id, -1)
,[akksuorv]
,[akk_ika]
,akk_ika_id = isnull(d_akk_ika.id, -1)
,[akk_aiempi_tutkinto_suoritettu]
,[akk_aiempi_tutkinto_suoritettu_id] = isnull(d_akk_ts.id, -2)
--,[ylamklkm]
,[ylamklkm_henkiloiden_lkm] = Case when ylamklkm > 0 then henkiloiden_lkm else 0 end
,[ylamkkoulk]
,[ylamk_aiempi_koulutusluokitus_id] = isnull(d_ylamk_kl.id, -1)
,[ylamksuorv]
,[ylamk_ika]
,ylamk_ika_id = isnull(d_ylamk_ika.id, -1)
,[ylamk_aiempi_tutkinto_suoritettu]
,[ylamk_aiempi_tutkinto_suoritettu_id] = isnull(d_ylamk_ts.id, -2)
--,[ykklkm]
,[ykklkm_id] = isnull(d_ykk_tm.id, -2)
,[ykk_henkiloiden_lkm] = Case when ykklkm > 0 then henkiloiden_lkm else 0 end
,[ykkkoulk]
,[ykk_aiempi_koulutusluokitus_id] = isnull(d_ykk_kl.id, -1)
,[ykksuorv]
,[ykk_ika]
,ykk_ika_id = isnull(d_ykk_ika.id, -1)
,[ykk_aiempi_tutkinto_suoritettu]
,[ykk_aiempi_tutkinto_suoritettu_id] = isnull(d_ykk_ts.id, -2)
--,[laakerlkm]
,[laaker_henkiloiden_lkm] = Case when laakerlkm > 0 then henkiloiden_lkm else 0 end
,[laakerkoulk]
,[laaker_aiempi_koulutusluokitus_id] = isnull(d_laaker_kl.id, -1)
,[laakersuorv]
,[laaker_ika]
,laaker_ika_id = isnull(d_laaker_ika.id, -1)
,[laaker_aiempi_tutkinto_suoritettu]
,[laaker_aiempi_tutkinto_suoritettu_id] = isnull(d_laaker_ts.id, -2)
--,[lislkm]
,[lis_henkiloiden_lkm] = Case when lislkm > 0 then henkiloiden_lkm else 0 end
,[liskoulk]
,[lis_aiempi_koulutusluokitus_id] = isnull(d_lis_kl.id, -1)
,[lissuorv]
,[lis_ika]
,lis_ika_id = isnull(d_lis_ika.id, -1)
,[lis_aiempi_tutkinto_suoritettu]
,[lis_aiempi_tutkinto_suoritettu_id] = isnull(d_lis_ts.id, -2)
--,[tohtlkm]
,[toht_henkiloiden_lkm] = Case when tohtlkm > 0 then henkiloiden_lkm else 0 end
,[tohtkoulk]
,[toht_aiempi_koulutusluokitus_id] = isnull(d_toht_kl.id, -1)
,[tohtsuorv]
,[toht_ika]
,toht_ika_id = isnull(d_toht_ika.id, -1)
,[toht_aiempi_tutkinto_suoritettu]
,[toht_aiempi_tutkinto_suoritettu_id] = isnull(d_toht_ts.id, -2)
,yliopistotutkinto_aiempi_suoritettu_id = isnull(d_yo_ts.id, -2)
,yo_jatkotutkinto_aiempi_suoritettu_id = isnull(d_yojatko_ts.id, -2)
,opisto_tai_ammatillisen_korkea_asteen_tutkinto_aiempi_suoritettu_id = isnull(d_opammka_ts.id, -2)
,korkeakoulututkinto_aiempi_suoritettu_id = isnull(d_kk_ts.id, -2)
,toisen_asteen_tutkinto_aiempi_suoritettu_id = isnull(d_aste2_ts.id, -2)
,ammatillisen_tutkinto_aiempi_suoritettu_ryhma1_id = isnull(d_amm_r1_ts.id, -2)
,ammatillisen_tutkinto_aiempi_suoritettu_ryhma2_id = isnull(d_amm_r2_ts.id, -2)
,ammatillisen_tutkinto_aiempi_suoritettu_ryhma3_id = isnull(d_amm_r3_ts.id, -2)
-- koulutusasteiden yhteenlasketut (netotut) henkilöiden lukumäärät
,Aiemman_yliopistotutkinnon_suorittaneiden_lkm = Case
when yliopistotutkinto_aiempi_suoritettu = 'K' then henkiloiden_lkm else 0 end
,Aiemman_yo_jatkotutkinnon_suorittaneiden_lkm = Case
when yo_jatkotutkinto_aiempi_suoritettu = 'K' then henkiloiden_lkm else 0 end
,Aiemman_opisto_tai_ammatillisen_korkea_asteen_tutkinnon_suorittaneiden_lkm = Case
when opisto_tai_ammatillisen_korkea_asteen_tutkinto_aiempi_suoritettu = 'K' then henkiloiden_lkm else 0 end
,Aiemman_korkeakoulututkinnon_suorittaneiden_lkm = Case
when korkeakoulututkinto_aiempi_suoritettu = 'K' then henkiloiden_lkm else 0 end
,Aiemman_toisen_asteen_tutkinnon_suorittaneiden_lkm = Case
when toisen_asteen_tutkinto_aiempi_suoritettu = 'K' then henkiloiden_lkm else 0 end
,Aiemman_ammatillisen_tutkinnon_ryhma1_suorittaneiden_lkm = --32, ryhmä 1
Case when ammatillisen_tutkinto_aiempi_suoritettu_ryhma1 = 'K' then henkiloiden_lkm else 0 end
,Aiemman_ammatillisen_tutkinnon_ryhma2_suorittaneiden_lkm = --32, ryhmä 1
Case when ammatillisen_tutkinto_aiempi_suoritettu_ryhma2 = 'K' then henkiloiden_lkm else 0 end
,Aiemman_ammatillisen_tutkinnon_ryhma3_suorittaneiden_lkm = --32, ryhmä 1
Case when ammatillisen_tutkinto_aiempi_suoritettu_ryhma3 = 'K' then henkiloiden_lkm else 0 end
-- Lisätty uusi mittari 30.5.2016 / CSC Jarmo
,Ei_aiempaa_perusasteen_jalkeista_tutkintoa_lkm = Case
when lulkm=0 and ammlkm=0 and opistlkm=0 and ammkalkm=0 and amklkm=0 and akklkm=0 and ylamklkm=0 and ykklkm=0 and laakerlkm=0 and lislkm=0 and tohtlkm=0 then henkiloiden_lkm
else 0 end
--Haetaan surrogaatit koulutusalan pysyvyydelle
/* Alla K/E-kentät
,lu_sama_aiempi_koulutusala2002
,amm_sama_aiempi_koulutusala2002
,opist_sama_aiempi_koulutusala2002
,ammka_sama_aiempi_koulutusala2002
,akk_sama_aiempi_koulutusala2002
,amk_sama_aiempi_koulutusala2002
,ylamk_sama_aiempi_koulutusala2002
,ykk_sama_aiempi_koulutusala2002
,laaker_sama_aiempi_koulutusala2002
,lis_sama_aiempi_koulutusala2002
,toht_sama_aiempi_koulutusala2002
,yliopistotutkinto_sama_aiempi_koulutusala2002
,yo_jatkotutkinto_sama_aiempi_koulutusala2002
,opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_koulutusala2002
,korkeakoulututkinto_sama_aiempi_koulutusala2002
,toisen_asteen_tutkinto_sama_aiempi_koulutusala2002
,lu_sama_aiempi_opintoala2002
,amm_sama_aiempi_opintoala2002
,opist_sama_aiempi_opintoala2002
,ammka_sama_aiempi_opintoala2002
,akk_sama_aiempi_opintoala2002
,amk_sama_aiempi_opintoala2002
,ylamk_sama_aiempi_opintoala2002
,ykk_sama_aiempi_opintoala2002
,laaker_sama_aiempi_opintoala2002
,lis_sama_aiempi_opintoala2002
,toht_sama_aiempi_opintoala2002
,yliopistotutkinto_sama_aiempi_opintoala2002
,yo_jatkotutkinto_sama_aiempi_opintoala2002
,opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_opintoala2002
,korkeakoulututkinto_sama_aiempi_opintoala2002
,toisen_asteen_tutkinto_sama_aiempi_opintoala2002
,lu_sama_aiempi_opintoala1995
,amm_sama_aiempi_opintoala1995
,opist_sama_aiempi_opintoala1995
,ammka_sama_aiempi_opintoala1995
,akk_sama_aiempi_opintoala1995
,amk_sama_aiempi_opintoala1995
,ylamk_sama_aiempi_opintoala1995
,ykk_sama_aiempi_opintoala1995
,laaker_sama_aiempi_opintoala1995
,lis_sama_aiempi_opintoala1995
,toht_sama_aiempi_opintoala1995
,yliopistotutkinto_sama_aiempi_opintoala1995
,yo_jatkotutkinto_sama_aiempi_opintoala1995
,opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_opintoala1995
,korkeakoulututkinto_sama_aiempi_opintoala1995
,toisen_asteen_tutkinto_sama_aiempi_opintoala1995
,lu_sama_aiempi_iscfibroad2013
,amm_sama_aiempi_iscfibroad2013
,opist_sama_aiempi_iscfibroad2013
,ammka_sama_aiempi_iscfibroad2013
,akk_sama_aiempi_iscfibroad2013
,amk_sama_aiempi_iscfibroad2013
,ylamk_sama_aiempi_iscfibroad2013
,ykk_sama_aiempi_iscfibroad2013
,laaker_sama_aiempi_iscfibroad2013
,lis_sama_aiempi_iscfibroad2013
,toht_sama_aiempi_iscfibroad2013
,yliopistotutkinto_sama_aiempi_iscfibroad2013
,yo_jatkotutkinto_sama_aiempi_iscfibroad2013
,opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_iscfibroad2013
,korkeakoulututkinto_sama_aiempi_iscfibroad2013
,toisen_asteen_tutkinto_sama_aiempi_iscfibroad2013
,lu_sama_aiempi_iscfinarrow2013
,amm_sama_aiempi_iscfinarrow2013
,opist_sama_aiempi_iscfinarrow2013
,ammka_sama_aiempi_iscfinarrow2013
,akk_sama_aiempi_iscfinarrow2013
,amk_sama_aiempi_iscfinarrow2013
,ylamk_sama_aiempi_iscfinarrow2013
,ykk_sama_aiempi_iscfinarrow2013
,laaker_sama_aiempi_iscfinarrow2013
,lis_sama_aiempi_iscfinarrow2013
,toht_sama_aiempi_iscfinarrow2013
,yliopistotutkinto_sama_aiempi_iscfinarrow2013
,yo_jatkotutkinto_sama_aiempi_iscfinarrow2013
,opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_iscfinarrow2013
,korkeakoulututkinto_sama_aiempi_iscfinarrow2013
,toisen_asteen_tutkinto_sama_aiempi_iscfinarrow2013
*/
--selitteet
,lu_sama_aiempi_koulutusala2002_id = ISNULL(lu_sama_aiempi_koulutusala2002.id, -2)
,amm_sama_aiempi_koulutusala2002_id = ISNULL(amm_sama_aiempi_koulutusala2002.id, -2)
,opist_sama_aiempi_koulutusala2002_id = ISNULL(opist_sama_aiempi_koulutusala2002.id, -2)
,ammka_sama_aiempi_koulutusala2002_id = ISNULL(ammka_sama_aiempi_koulutusala2002.id, -2)
,akk_sama_aiempi_koulutusala2002_id = ISNULL(akk_sama_aiempi_koulutusala2002.id, -2)
,amk_sama_aiempi_koulutusala2002_id = ISNULL(amk_sama_aiempi_koulutusala2002.id, -2)
,ylamk_sama_aiempi_koulutusala2002_id = ISNULL(ylamk_sama_aiempi_koulutusala2002.id, -2)
,ykk_sama_aiempi_koulutusala2002_id = ISNULL(ykk_sama_aiempi_koulutusala2002.id, -2)
,laaker_sama_aiempi_koulutusala2002_id = ISNULL(laaker_sama_aiempi_koulutusala2002.id, -2)
,lis_sama_aiempi_koulutusala2002_id = ISNULL(lis_sama_aiempi_koulutusala2002.id, -2)
,toht_sama_aiempi_koulutusala2002_id = ISNULL(toht_sama_aiempi_koulutusala2002.id, -2)
,yliopistotutkinto_sama_aiempi_koulutusala2002_id = ISNULL(yliopistotutkinto_sama_aiempi_koulutusala2002.id, -2)
,yo_jatkotutkinto_sama_aiempi_koulutusala2002_id = ISNULL(yo_jatkotutkinto_sama_aiempi_koulutusala2002.id, -2)
,opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_koulutusala2002_id = ISNULL(opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_koulutusala2002.id, -2)
,korkeakoulututkinto_sama_aiempi_koulutusala2002_id = ISNULL(korkeakoulututkinto_sama_aiempi_koulutusala2002.id, -2)
,toisen_asteen_tutkinto_sama_aiempi_koulutusala2002_id = ISNULL(toisen_asteen_tutkinto_sama_aiempi_koulutusala2002.id, -2)
,lu_sama_aiempi_opintoala2002_id = ISNULL(lu_sama_aiempi_opintoala2002.id, -2)
,amm_sama_aiempi_opintoala2002_id = ISNULL(amm_sama_aiempi_opintoala2002.id, -2)
,opist_sama_aiempi_opintoala2002_id = ISNULL(opist_sama_aiempi_opintoala2002.id, -2)
,ammka_sama_aiempi_opintoala2002_id = ISNULL(ammka_sama_aiempi_opintoala2002.id, -2)
,akk_sama_aiempi_opintoala2002_id = ISNULL(akk_sama_aiempi_opintoala2002.id, -2)
,amk_sama_aiempi_opintoala2002_id = ISNULL(amk_sama_aiempi_opintoala2002.id, -2)
,ylamk_sama_aiempi_opintoala2002_id = ISNULL(ylamk_sama_aiempi_opintoala2002.id, -2)
,ykk_sama_aiempi_opintoala2002_id = ISNULL(ykk_sama_aiempi_opintoala2002.id, -2)
,laaker_sama_aiempi_opintoala2002_id = ISNULL(laaker_sama_aiempi_opintoala2002.id, -2)
,lis_sama_aiempi_opintoala2002_id = ISNULL(lis_sama_aiempi_opintoala2002.id, -2)
,toht_sama_aiempi_opintoala2002_id = ISNULL(toht_sama_aiempi_opintoala2002.id, -2)
,yliopistotutkinto_sama_aiempi_opintoala2002_id = ISNULL(yliopistotutkinto_sama_aiempi_opintoala2002.id, -2)
,yo_jatkotutkinto_sama_aiempi_opintoala2002_id = ISNULL(yo_jatkotutkinto_sama_aiempi_opintoala2002.id, -2)
,opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_opintoala2002_id = ISNULL(opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_opintoala2002.id, -2)
,korkeakoulututkinto_sama_aiempi_opintoala2002_id = ISNULL(korkeakoulututkinto_sama_aiempi_opintoala2002.id, -2)
,toisen_asteen_tutkinto_sama_aiempi_opintoala2002_id = ISNULL(toisen_asteen_tutkinto_sama_aiempi_opintoala2002.id, -2)
,lu_sama_aiempi_opintoala1995_id = ISNULL(lu_sama_aiempi_opintoala1995.id, -2)
,amm_sama_aiempi_opintoala1995_id = ISNULL(amm_sama_aiempi_opintoala1995.id, -2)
,opist_sama_aiempi_opintoala1995_id = ISNULL(opist_sama_aiempi_opintoala1995.id, -2)
,ammka_sama_aiempi_opintoala1995_id = ISNULL(ammka_sama_aiempi_opintoala1995.id, -2)
,akk_sama_aiempi_opintoala1995_id = ISNULL(akk_sama_aiempi_opintoala1995.id, -2)
,amk_sama_aiempi_opintoala1995_id = ISNULL(amk_sama_aiempi_opintoala1995.id, -2)
,ylamk_sama_aiempi_opintoala1995_id = ISNULL(ylamk_sama_aiempi_opintoala1995.id, -2)
,ykk_sama_aiempi_opintoala1995_id = ISNULL(ykk_sama_aiempi_opintoala1995.id, -2)
,laaker_sama_aiempi_opintoala1995_id = ISNULL(laaker_sama_aiempi_opintoala1995.id, -2)
,lis_sama_aiempi_opintoala1995_id = ISNULL(lis_sama_aiempi_opintoala1995.id, -2)
,toht_sama_aiempi_opintoala1995_id = ISNULL(toht_sama_aiempi_opintoala1995.id, -2)
,yliopistotutkinto_sama_aiempi_opintoala1995_id = ISNULL(yliopistotutkinto_sama_aiempi_opintoala1995.id, -2)
,yo_jatkotutkinto_sama_aiempi_opintoala1995_id = ISNULL(yo_jatkotutkinto_sama_aiempi_opintoala1995.id, -2)
,opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_opintoala1995_id = ISNULL(opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_opintoala1995.id, -2)
,korkeakoulututkinto_sama_aiempi_opintoala1995_id = ISNULL(korkeakoulututkinto_sama_aiempi_opintoala1995.id, -2)
,toisen_asteen_tutkinto_sama_aiempi_opintoala1995_id = ISNULL(toisen_asteen_tutkinto_sama_aiempi_opintoala1995.id, -2)
,lu_sama_aiempi_iscfibroad2013_id = ISNULL(lu_sama_aiempi_iscfibroad2013.id, -2)
,amm_sama_aiempi_iscfibroad2013_id = ISNULL(amm_sama_aiempi_iscfibroad2013.id, -2)
,opist_sama_aiempi_iscfibroad2013_id = ISNULL(opist_sama_aiempi_iscfibroad2013.id, -2)
,ammka_sama_aiempi_iscfibroad2013_id = ISNULL(ammka_sama_aiempi_iscfibroad2013.id, -2)
,akk_sama_aiempi_iscfibroad2013_id = ISNULL(akk_sama_aiempi_iscfibroad2013.id, -2)
,amk_sama_aiempi_iscfibroad2013_id = ISNULL(amk_sama_aiempi_iscfibroad2013.id, -2)
,ylamk_sama_aiempi_iscfibroad2013_id = ISNULL(ylamk_sama_aiempi_iscfibroad2013.id, -2)
,ykk_sama_aiempi_iscfibroad2013_id = ISNULL(ykk_sama_aiempi_iscfibroad2013.id, -2)
,laaker_sama_aiempi_iscfibroad2013_id = ISNULL(laaker_sama_aiempi_iscfibroad2013.id, -2)
,lis_sama_aiempi_iscfibroad2013_id = ISNULL(lis_sama_aiempi_iscfibroad2013.id, -2)
,toht_sama_aiempi_iscfibroad2013_id = ISNULL(toht_sama_aiempi_iscfibroad2013.id, -2)
,yliopistotutkinto_sama_aiempi_iscfibroad2013_id = ISNULL(yliopistotutkinto_sama_aiempi_iscfibroad2013.id, -2)
,yo_jatkotutkinto_sama_aiempi_iscfibroad2013_id = ISNULL(yo_jatkotutkinto_sama_aiempi_iscfibroad2013.id, -2)
,opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_iscfibroad2013_id = ISNULL(opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_iscfibroad2013.id, -2)
,korkeakoulututkinto_sama_aiempi_iscfibroad2013_id = ISNULL(korkeakoulututkinto_sama_aiempi_iscfibroad2013.id, -2)
,toisen_asteen_tutkinto_sama_aiempi_iscfibroad2013_id = ISNULL(toisen_asteen_tutkinto_sama_aiempi_iscfibroad2013.id, -2)
,lu_sama_aiempi_iscfinarrow2013_id = ISNULL(lu_sama_aiempi_iscfinarrow2013.id, -2)
,amm_sama_aiempi_iscfinarrow2013_id = ISNULL(amm_sama_aiempi_iscfinarrow2013.id, -2)
,opist_sama_aiempi_iscfinarrow2013_id = ISNULL(opist_sama_aiempi_iscfinarrow2013.id, -2)
,ammka_sama_aiempi_iscfinarrow2013_id = ISNULL(ammka_sama_aiempi_iscfinarrow2013.id, -2)
,akk_sama_aiempi_iscfinarrow2013_id = ISNULL(akk_sama_aiempi_iscfinarrow2013.id, -2)
,amk_sama_aiempi_iscfinarrow2013_id = ISNULL(amk_sama_aiempi_iscfinarrow2013.id, -2)
,ylamk_sama_aiempi_iscfinarrow2013_id = ISNULL(ylamk_sama_aiempi_iscfinarrow2013.id, -2)
,ykk_sama_aiempi_iscfinarrow2013_id = ISNULL(ykk_sama_aiempi_iscfinarrow2013.id, -2)
,laaker_sama_aiempi_iscfinarrow2013_id = ISNULL(laaker_sama_aiempi_iscfinarrow2013.id, -2)
,lis_sama_aiempi_iscfinarrow2013_id = ISNULL(lis_sama_aiempi_iscfinarrow2013.id, -2)
,toht_sama_aiempi_iscfinarrow2013_id = ISNULL(toht_sama_aiempi_iscfinarrow2013.id, -2)
,yliopistotutkinto_sama_aiempi_iscfinarrow2013_id = ISNULL(yliopistotutkinto_sama_aiempi_iscfinarrow2013.id, -2)
,yo_jatkotutkinto_sama_aiempi_iscfinarrow2013_id = ISNULL(yo_jatkotutkinto_sama_aiempi_iscfinarrow2013.id, -2)
,opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_iscfinarrow2013_id = ISNULL(opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_iscfinarrow2013.id, -2)
,korkeakoulututkinto_sama_aiempi_iscfinarrow2013_id = ISNULL(korkeakoulututkinto_sama_aiempi_iscfinarrow2013.id, -2)
,toisen_asteen_tutkinto_sama_aiempi_iscfinarrow2013_id = ISNULL(toisen_asteen_tutkinto_sama_aiempi_iscfinarrow2013.id, -2)
,f.[tietolahde]
,[rivinumero]
--INTO VipunenTK.dbo.f_tutkinnon_suorittaneiden_aiempi_koulutus_4_7
FROM ( --f
SELECT [tilastovuosi]
,[tilv_date]
,[sukupuoli_koodi]
,[aidinkieli_versio1_koodi]
,[suorv]
,[suorlk]
,[suorituskausikoodi]
,f_tutkinnon_suorittaneiden_aiempi_koulutus_4_7.[koulutusluokitus_avain]
,[alvv]
,[allk]
,[aloituskausikoodi]
,[oppisopimuskoulutus_koodi]
,sopimusjaksot
,[ammatillisen_koulutuksen_koulutuslaji_koodi]
,[opetussuunnitelmaperusteinen_koulutus_nayttotutkintoon_valmistava_koulutus_koodi]
,[ammatillinen_koulutus_ryhma]
,[ammatillinen_koulutus_ryhma_avain]
,[nuorten_aikuisten_koulutus_amm_koodi]
,[syntv]
,[ika_avain]
,[henkiloiden_lkm]
,[lulkm]
,[lukoulk]
,[lusuorv]
,[luaikoul]
,[lu_ika]
,[lu_aiempi_tutkinto_suoritettu]
,[ammlkm]
,[ammkoulk]
,[ammsuorv]
,[ammoppis]
,amm_sopimusjaksot
,[ammtutklaja]
,[ammtutktav]
,[amm_ika]
,[amm_aiempi_tutkinto_suoritettu]
,[opistlkm]
,[opistkoulk]
,[opistsuorv]
,[opist_ika]
,[opist_aiempi_tutkinto_suoritettu]
,[ammkalkm]
,[ammkakoulk]
,[ammkasuorv]
,[ammka_ika]
,[ammka_aiempi_tutkinto_suoritettu]
,[amklkm]
,[amkkoulk]
,[amksuorv]
,[amkaikoul]
,[amk_ika]
,[amk_aiempi_tutkinto_suoritettu]
,[akklkm]
,[akkkoulk]
,[akksuorv]
,[akk_ika]
,[akk_aiempi_tutkinto_suoritettu]
,[ylamklkm]
,[ylamkkoulk]
,[ylamksuorv]
,[ylamk_ika]
,[ylamk_aiempi_tutkinto_suoritettu]
,[ykklkm]
,[ykkkoulk]
,[ykksuorv]
,[ykk_ika]
,[ykk_aiempi_tutkinto_suoritettu]
,[laakerlkm]
,[laakerkoulk]
,[laakersuorv]
,[laaker_ika]
,[laaker_aiempi_tutkinto_suoritettu]
,[lislkm]
,[liskoulk]
,[lissuorv]
,[lis_ika]
,[lis_aiempi_tutkinto_suoritettu]
,[tohtlkm]
,[tohtkoulk]
,[tohtsuorv]
,[toht_ika]
,[toht_aiempi_tutkinto_suoritettu]
,f_tutkinnon_suorittaneiden_aiempi_koulutus_4_7.[tietolahde]
,[rivinumero]
,[tarkastelujakso_koodi]
,tutkintojen_maara_amm_avain = 'AIK_AMM_' + Cast(Case when ammlkm > 9 then 9 else ammlkm end as nvarchar(2))
,tutkintojen_maara_amk_avain = 'AIK_AMK_' + Cast(Case when amklkm > 9 then 9 else amklkm end as nvarchar(2))
,tutkintojen_maara_akk_avain = 'AIK_AKK_' + Cast(Case when akklkm > 9 then 9 else akklkm end as nvarchar(2))
,tutkintojen_maara_ykk_avain = 'AIK_YKK_' + Cast(Case when ykklkm > 9 then 9 else ykklkm end as nvarchar(2))
,tutkinto_suoritettu_lu_avain = 'AIK_LU_' + lu_aiempi_tutkinto_suoritettu
,tutkinto_suoritettu_amm_avain = 'AIK_AMM_' + amm_aiempi_tutkinto_suoritettu
,tutkinto_suoritettu_opisto_avain = 'AIK_OPISTO_' + opist_aiempi_tutkinto_suoritettu
,tutkinto_suoritettu_ammka_avain = 'AIK_AMMKA_' + ammka_aiempi_tutkinto_suoritettu
,tutkinto_suoritettu_amk_avain = 'AIK_AMK_' + amk_aiempi_tutkinto_suoritettu
,tutkinto_suoritettu_akk_avain = 'AIK_AKK_' + akk_aiempi_tutkinto_suoritettu
,tutkinto_suoritettu_ylamk_avain = 'AIK_YAMK_' + ylamk_aiempi_tutkinto_suoritettu
,tutkinto_suoritettu_ykk_avain = 'AIK_YKK_' + ykk_aiempi_tutkinto_suoritettu
,tutkinto_suoritettu_laaker_avain = 'AIK_LAAKER_' + laaker_aiempi_tutkinto_suoritettu
,tutkinto_suoritettu_lis_avain = 'AIK_LIS_' + lis_aiempi_tutkinto_suoritettu
,tutkinto_suoritettu_toht_avain = 'AIK_TOHT_' + toht_aiempi_tutkinto_suoritettu
,yliopistotutkinto_aiempi_suoritettu = Coalesce( -- akklkm, ykklkm, laakerlkm --63, 72, 73
Nullif(akk_aiempi_tutkinto_suoritettu, 'E'),
Nullif(ykk_aiempi_tutkinto_suoritettu, 'E'),
Nullif(laaker_aiempi_tutkinto_suoritettu, 'E'),
'E')
,yo_jatkotutkinto_aiempi_suoritettu = Coalesce(--81, 82
Nullif(lis_aiempi_tutkinto_suoritettu, 'E'),
Nullif(toht_aiempi_tutkinto_suoritettu, 'E'),
'E')
,opisto_tai_ammatillisen_korkea_asteen_tutkinto_aiempi_suoritettu = Coalesce( --50, 61
Nullif(opist_aiempi_tutkinto_suoritettu, 'E'),
Nullif(ammka_aiempi_tutkinto_suoritettu, 'E'),
'E')
,korkeakoulututkinto_aiempi_suoritettu = Coalesce( --62, 63, 71, 72, 73
Nullif(amk_aiempi_tutkinto_suoritettu, 'E'),
Nullif(akk_aiempi_tutkinto_suoritettu, 'E'),
Nullif(ylamk_aiempi_tutkinto_suoritettu, 'E'),
Nullif(ykk_aiempi_tutkinto_suoritettu, 'E'),
Nullif(laaker_aiempi_tutkinto_suoritettu, 'E'),
'E')
,toisen_asteen_tutkinto_aiempi_suoritettu = Coalesce( --31, 32 ,
Nullif(lu_aiempi_tutkinto_suoritettu, 'E'),
Nullif(amm_aiempi_tutkinto_suoritettu, 'E'),
'E')
--,ammatillinen_koulutus_ryhma
,ammatillisen_tutkinto_aiempi_suoritettu_ryhma1 = --32, ryhmä 1
Case when ammatillinen_koulutus_ryhma = 'R1' then [amm_aiempi_tutkinto_suoritettu] else 'E' end
,ammatillisen_tutkinto_aiempi_suoritettu_ryhma2 = --32, ryhmä 2
Case when ammatillinen_koulutus_ryhma = 'R2' then [amm_aiempi_tutkinto_suoritettu] else 'E' end
,ammatillisen_tutkinto_aiempi_suoritettu_ryhma3 = --32, ryhmä 3
Case when ammatillinen_koulutus_ryhma = 'R3' then [amm_aiempi_tutkinto_suoritettu] else 'E' end
,lu_sama_aiempi_koulutusala2002_avain = Case when lu_aiempi_tutkinto_suoritettu = 'K' then 'lu_sama_aiempi_koulutusala2002_' + Case when d_kl_ala.koulutusala2002_koodi = d_lu_kl_ala.koulutusala2002_koodi then 'K' else 'E' end else '-1' end
,amm_sama_aiempi_koulutusala2002_avain = Case when amm_aiempi_tutkinto_suoritettu = 'K' then 'amm_sama_aiempi_koulutusala2002_' + Case when d_kl_ala.koulutusala2002_koodi = d_amm_kl_ala.koulutusala2002_koodi then 'K' else 'E' end else '-1' end
,opist_sama_aiempi_koulutusala2002_avain = Case when opist_aiempi_tutkinto_suoritettu = 'K' then 'opist_sama_aiempi_koulutusala2002_' + Case when d_kl_ala.koulutusala2002_koodi = d_opist_kl_ala.koulutusala2002_koodi then 'K' else 'E' end else '-1' end
,ammka_sama_aiempi_koulutusala2002_avain = Case when ammka_aiempi_tutkinto_suoritettu = 'K' then 'ammka_sama_aiempi_koulutusala2002_' + Case when d_kl_ala.koulutusala2002_koodi = d_ammka_kl_ala.koulutusala2002_koodi then 'K' else 'E' end else '-1' end
,akk_sama_aiempi_koulutusala2002_avain = Case when akk_aiempi_tutkinto_suoritettu = 'K' then 'akk_sama_aiempi_koulutusala2002_' + Case when d_kl_ala.koulutusala2002_koodi = d_akk_kl_ala.koulutusala2002_koodi then 'K' else 'E' end else '-1' end
,amk_sama_aiempi_koulutusala2002_avain = Case when amk_aiempi_tutkinto_suoritettu = 'K' then 'amk_sama_aiempi_koulutusala2002_' + Case when d_kl_ala.koulutusala2002_koodi = d_amk_kl_ala.koulutusala2002_koodi then 'K' else 'E' end else '-1' end
,ylamk_sama_aiempi_koulutusala2002_avain = Case when ylamk_aiempi_tutkinto_suoritettu = 'K' then 'ylamk_sama_aiempi_koulutusala2002_' + Case when d_kl_ala.koulutusala2002_koodi = d_ylamk_kl_ala.koulutusala2002_koodi then 'K' else 'E' end else '-1' end
,ykk_sama_aiempi_koulutusala2002_avain = Case when ykk_aiempi_tutkinto_suoritettu = 'K' then 'ykk_sama_aiempi_koulutusala2002_' + Case when d_kl_ala.koulutusala2002_koodi = d_ykk_kl_ala.koulutusala2002_koodi then 'K' else 'E' end else '-1' end
,laaker_sama_aiempi_koulutusala2002_avain = Case when laaker_aiempi_tutkinto_suoritettu = 'K' then 'laaker_sama_aiempi_koulutusala2002_' + Case when d_kl_ala.koulutusala2002_koodi = d_laaker_kl_ala.koulutusala2002_koodi then 'K' else 'E' end else '-1' end
,lis_sama_aiempi_koulutusala2002_avain = Case when lis_aiempi_tutkinto_suoritettu = 'K' then 'lis_sama_aiempi_koulutusala2002_' + Case when d_kl_ala.koulutusala2002_koodi = d_lis_kl_ala.koulutusala2002_koodi then 'K' else 'E' end else '-1' end
,toht_sama_aiempi_koulutusala2002_avain = Case when toht_aiempi_tutkinto_suoritettu = 'K' then 'toht_sama_aiempi_koulutusala2002_' + Case when d_kl_ala.koulutusala2002_koodi = d_toht_kl_ala.koulutusala2002_koodi then 'K' else 'E' end else '-1' end
,yliopistotutkinto_sama_aiempi_koulutusala2002_avain = Case when 'K' in
(akk_aiempi_tutkinto_suoritettu,
ykk_aiempi_tutkinto_suoritettu,
laaker_aiempi_tutkinto_suoritettu)
then 'yliopistotutkinto_sama_aiempi_koulutusala2002_' + Case when d_kl_ala.koulutusala2002_koodi in (
d_akk_kl_ala.koulutusala2002_koodi,
d_ykk_kl_ala.koulutusala2002_koodi,
d_laaker_kl_ala.koulutusala2002_koodi)
then 'K' else 'E' end else '-1' end
,yo_jatkotutkinto_sama_aiempi_koulutusala2002_avain = Case when 'K' in
(lis_aiempi_tutkinto_suoritettu,
toht_aiempi_tutkinto_suoritettu)
then 'yo_jatkotutkinto_sama_aiempi_koulutusala2002_' + Case when d_kl_ala.koulutusala2002_koodi in (
d_lis_kl_ala.koulutusala2002_koodi,
d_toht_kl_ala.koulutusala2002_koodi)
then 'K' else 'E' end else '-1' end
,opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_koulutusala2002_avain = Case when 'K' in
(opist_aiempi_tutkinto_suoritettu,
ammka_aiempi_tutkinto_suoritettu)
then 'opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_koulutusala2002_' + Case when d_kl_ala.koulutusala2002_koodi in (
d_opist_kl_ala.koulutusala2002_koodi,
d_ammka_kl_ala.koulutusala2002_koodi)
then 'K' else 'E' end else '-1' end
,korkeakoulututkinto_sama_aiempi_koulutusala2002_avain = Case when 'K' in
(amk_aiempi_tutkinto_suoritettu,
ylamk_aiempi_tutkinto_suoritettu,
akk_aiempi_tutkinto_suoritettu,
ykk_aiempi_tutkinto_suoritettu,
laaker_aiempi_tutkinto_suoritettu)
then 'korkeakoulututkinto_sama_aiempi_koulutusala2002_' + Case when d_kl_ala.koulutusala2002_koodi in (
d_amk_kl_ala.koulutusala2002_koodi,
d_ylamk_kl_ala.koulutusala2002_koodi,
d_akk_kl_ala.koulutusala2002_koodi,
d_ykk_kl_ala.koulutusala2002_koodi,
d_laaker_kl_ala.koulutusala2002_koodi)
then 'K' else 'E' end else '-1' end
,toisen_asteen_tutkinto_sama_aiempi_koulutusala2002_avain = Case when 'K' in
(lu_aiempi_tutkinto_suoritettu,
amm_aiempi_tutkinto_suoritettu)
then 'toisen_asteen_tutkinto_sama_aiempi_koulutusala2002_' + Case when d_kl_ala.koulutusala2002_koodi in (
d_lu_kl_ala.koulutusala2002_koodi,
d_amm_kl_ala.koulutusala2002_koodi)
then 'K' else 'E' end else '-1' end
,lu_sama_aiempi_opintoala2002_avain = Case when lu_aiempi_tutkinto_suoritettu = 'K' then 'lu_sama_aiempi_opintoala2002_' + Case when d_kl_ala.opintoala2002_koodi = d_lu_kl_ala.opintoala2002_koodi then 'K' else 'E' end else '-1' end
,amm_sama_aiempi_opintoala2002_avain = Case when amm_aiempi_tutkinto_suoritettu = 'K' then 'amm_sama_aiempi_opintoala2002_' + Case when d_kl_ala.opintoala2002_koodi = d_amm_kl_ala.opintoala2002_koodi then 'K' else 'E' end else '-1' end
,opist_sama_aiempi_opintoala2002_avain = Case when opist_aiempi_tutkinto_suoritettu = 'K' then 'opist_sama_aiempi_opintoala2002_' + Case when d_kl_ala.opintoala2002_koodi = d_opist_kl_ala.opintoala2002_koodi then 'K' else 'E' end else '-1' end
,ammka_sama_aiempi_opintoala2002_avain = Case when ammka_aiempi_tutkinto_suoritettu = 'K' then 'ammka_sama_aiempi_opintoala2002_' + Case when d_kl_ala.opintoala2002_koodi = d_ammka_kl_ala.opintoala2002_koodi then 'K' else 'E' end else '-1' end
,akk_sama_aiempi_opintoala2002_avain = Case when akk_aiempi_tutkinto_suoritettu = 'K' then 'akk_sama_aiempi_opintoala2002_' + Case when d_kl_ala.opintoala2002_koodi = d_akk_kl_ala.opintoala2002_koodi then 'K' else 'E' end else '-1' end
,amk_sama_aiempi_opintoala2002_avain = Case when amk_aiempi_tutkinto_suoritettu = 'K' then 'amk_sama_aiempi_opintoala2002_' + Case when d_kl_ala.opintoala2002_koodi = d_amk_kl_ala.opintoala2002_koodi then 'K' else 'E' end else '-1' end
,ylamk_sama_aiempi_opintoala2002_avain = Case when ylamk_aiempi_tutkinto_suoritettu = 'K' then 'ylamk_sama_aiempi_opintoala2002_' + Case when d_kl_ala.opintoala2002_koodi = d_ylamk_kl_ala.opintoala2002_koodi then 'K' else 'E' end else '-1' end
,ykk_sama_aiempi_opintoala2002_avain = Case when ykk_aiempi_tutkinto_suoritettu = 'K' then 'ykk_sama_aiempi_opintoala2002_' + Case when d_kl_ala.opintoala2002_koodi = d_ykk_kl_ala.opintoala2002_koodi then 'K' else 'E' end else '-1' end
,laaker_sama_aiempi_opintoala2002_avain = Case when laaker_aiempi_tutkinto_suoritettu = 'K' then 'laaker_sama_aiempi_opintoala2002_' + Case when d_kl_ala.opintoala2002_koodi = d_laaker_kl_ala.opintoala2002_koodi then 'K' else 'E' end else '-1' end
,lis_sama_aiempi_opintoala2002_avain = Case when lis_aiempi_tutkinto_suoritettu = 'K' then 'lis_sama_aiempi_opintoala2002_' + Case when d_kl_ala.opintoala2002_koodi = d_lis_kl_ala.opintoala2002_koodi then 'K' else 'E' end else '-1' end
,toht_sama_aiempi_opintoala2002_avain = Case when toht_aiempi_tutkinto_suoritettu = 'K' then 'toht_sama_aiempi_opintoala2002_' + Case when d_kl_ala.opintoala2002_koodi = d_toht_kl_ala.opintoala2002_koodi then 'K' else 'E' end else '-1' end
,yliopistotutkinto_sama_aiempi_opintoala2002_avain = Case when 'K' in
(akk_aiempi_tutkinto_suoritettu,
ykk_aiempi_tutkinto_suoritettu,
laaker_aiempi_tutkinto_suoritettu)
then 'yliopistotutkinto_sama_aiempi_opintoala2002_' + Case when d_kl_ala.opintoala2002_koodi in (
d_akk_kl_ala.opintoala2002_koodi,
d_ykk_kl_ala.opintoala2002_koodi,
d_laaker_kl_ala.opintoala2002_koodi)
then 'K' else 'E' end else '-1' end
,yo_jatkotutkinto_sama_aiempi_opintoala2002_avain = Case when 'K' in
(lis_aiempi_tutkinto_suoritettu,
toht_aiempi_tutkinto_suoritettu)
then 'yo_jatkotutkinto_sama_aiempi_opintoala2002_' + Case when d_kl_ala.opintoala2002_koodi in (
d_lis_kl_ala.opintoala2002_koodi,
d_toht_kl_ala.opintoala2002_koodi)
then 'K' else 'E' end else '-1' end
,opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_opintoala2002_avain = Case when 'K' in
(opist_aiempi_tutkinto_suoritettu,
ammka_aiempi_tutkinto_suoritettu)
then 'opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_opintoala2002_' + Case when d_kl_ala.opintoala2002_koodi in (
d_opist_kl_ala.opintoala2002_koodi,
d_ammka_kl_ala.opintoala2002_koodi)
then 'K' else 'E' end else '-1' end
,korkeakoulututkinto_sama_aiempi_opintoala2002_avain = Case when 'K' in
(amk_aiempi_tutkinto_suoritettu,
ylamk_aiempi_tutkinto_suoritettu,
akk_aiempi_tutkinto_suoritettu,
ykk_aiempi_tutkinto_suoritettu,
laaker_aiempi_tutkinto_suoritettu)
then 'korkeakoulututkinto_sama_aiempi_opintoala2002_' + Case when d_kl_ala.opintoala2002_koodi in (
d_amk_kl_ala.opintoala2002_koodi,
d_ylamk_kl_ala.opintoala2002_koodi,
d_akk_kl_ala.opintoala2002_koodi,
d_ykk_kl_ala.opintoala2002_koodi,
d_laaker_kl_ala.opintoala2002_koodi)
then 'K' else 'E' end else '-1' end
,toisen_asteen_tutkinto_sama_aiempi_opintoala2002_avain = Case when 'K' in
(lu_aiempi_tutkinto_suoritettu,
amm_aiempi_tutkinto_suoritettu)
then 'toisen_asteen_tutkinto_sama_aiempi_opintoala2002_' + Case when d_kl_ala.opintoala2002_koodi in (
d_lu_kl_ala.opintoala2002_koodi,
d_amm_kl_ala.opintoala2002_koodi)
then 'K' else 'E' end else '-1' end
,lu_sama_aiempi_opintoala1995_avain = Case when lu_aiempi_tutkinto_suoritettu = 'K' then 'lu_sama_aiempi_opintoala1995_' + Case when d_kl_ala.opintoala1995_koodi = d_lu_kl_ala.opintoala1995_koodi then 'K' else 'E' end else '-1' end
,amm_sama_aiempi_opintoala1995_avain = Case when amm_aiempi_tutkinto_suoritettu = 'K' then 'amm_sama_aiempi_opintoala1995_' + Case when d_kl_ala.opintoala1995_koodi = d_amm_kl_ala.opintoala1995_koodi then 'K' else 'E' end else '-1' end
,opist_sama_aiempi_opintoala1995_avain = Case when opist_aiempi_tutkinto_suoritettu = 'K' then 'opist_sama_aiempi_opintoala1995_' + Case when d_kl_ala.opintoala1995_koodi = d_opist_kl_ala.opintoala1995_koodi then 'K' else 'E' end else '-1' end
,ammka_sama_aiempi_opintoala1995_avain = Case when ammka_aiempi_tutkinto_suoritettu = 'K' then 'ammka_sama_aiempi_opintoala1995_' + Case when d_kl_ala.opintoala1995_koodi = d_ammka_kl_ala.opintoala1995_koodi then 'K' else 'E' end else '-1' end
,akk_sama_aiempi_opintoala1995_avain = Case when akk_aiempi_tutkinto_suoritettu = 'K' then 'akk_sama_aiempi_opintoala1995_' + Case when d_kl_ala.opintoala1995_koodi = d_akk_kl_ala.opintoala1995_koodi then 'K' else 'E' end else '-1' end
,amk_sama_aiempi_opintoala1995_avain = Case when amk_aiempi_tutkinto_suoritettu = 'K' then 'amk_sama_aiempi_opintoala1995_' + Case when d_kl_ala.opintoala1995_koodi = d_amk_kl_ala.opintoala1995_koodi then 'K' else 'E' end else '-1' end
,ylamk_sama_aiempi_opintoala1995_avain = Case when ylamk_aiempi_tutkinto_suoritettu = 'K' then 'ylamk_sama_aiempi_opintoala1995_' + Case when d_kl_ala.opintoala1995_koodi = d_ylamk_kl_ala.opintoala1995_koodi then 'K' else 'E' end else '-1' end
,ykk_sama_aiempi_opintoala1995_avain = Case when ykk_aiempi_tutkinto_suoritettu = 'K' then 'ykk_sama_aiempi_opintoala1995_' + Case when d_kl_ala.opintoala1995_koodi = d_ykk_kl_ala.opintoala1995_koodi then 'K' else 'E' end else '-1' end
,laaker_sama_aiempi_opintoala1995_avain = Case when laaker_aiempi_tutkinto_suoritettu = 'K' then 'laaker_sama_aiempi_opintoala1995_' + Case when d_kl_ala.opintoala1995_koodi = d_laaker_kl_ala.opintoala1995_koodi then 'K' else 'E' end else '-1' end
,lis_sama_aiempi_opintoala1995_avain = Case when lis_aiempi_tutkinto_suoritettu = 'K' then 'lis_sama_aiempi_opintoala1995_' + Case when d_kl_ala.opintoala1995_koodi = d_lis_kl_ala.opintoala1995_koodi then 'K' else 'E' end else '-1' end
,toht_sama_aiempi_opintoala1995_avain = Case when toht_aiempi_tutkinto_suoritettu = 'K' then 'toht_sama_aiempi_opintoala1995_' + Case when d_kl_ala.opintoala1995_koodi = d_toht_kl_ala.opintoala1995_koodi then 'K' else 'E' end else '-1' end
,yliopistotutkinto_sama_aiempi_opintoala1995_avain = Case when 'K' in
(akk_aiempi_tutkinto_suoritettu,
ykk_aiempi_tutkinto_suoritettu,
laaker_aiempi_tutkinto_suoritettu)
then 'yliopistotutkinto_sama_aiempi_opintoala1995_' + Case when d_kl_ala.opintoala1995_koodi in (
d_akk_kl_ala.opintoala1995_koodi,
d_ykk_kl_ala.opintoala1995_koodi,
d_laaker_kl_ala.opintoala1995_koodi)
then 'K' else 'E' end else '-1' end
,yo_jatkotutkinto_sama_aiempi_opintoala1995_avain = Case when 'K' in
(lis_aiempi_tutkinto_suoritettu,
toht_aiempi_tutkinto_suoritettu)
then 'yo_jatkotutkinto_sama_aiempi_opintoala1995_' + Case when d_kl_ala.opintoala1995_koodi in (
d_lis_kl_ala.opintoala1995_koodi,
d_toht_kl_ala.opintoala1995_koodi)
then 'K' else 'E' end else '-1' end
,opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_opintoala1995_avain = Case when 'K' in
(opist_aiempi_tutkinto_suoritettu,
ammka_aiempi_tutkinto_suoritettu)
then 'opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_opintoala1995_' + Case when d_kl_ala.opintoala1995_koodi in (
d_opist_kl_ala.opintoala1995_koodi,
d_ammka_kl_ala.opintoala1995_koodi)
then 'K' else 'E' end else '-1' end
,korkeakoulututkinto_sama_aiempi_opintoala1995_avain = Case when 'K' in
(amk_aiempi_tutkinto_suoritettu,
ylamk_aiempi_tutkinto_suoritettu,
akk_aiempi_tutkinto_suoritettu,
ykk_aiempi_tutkinto_suoritettu,
laaker_aiempi_tutkinto_suoritettu)
then 'korkeakoulututkinto_sama_aiempi_opintoala1995_' + Case when d_kl_ala.opintoala1995_koodi in (
d_amk_kl_ala.opintoala1995_koodi,
d_ylamk_kl_ala.opintoala1995_koodi,
d_akk_kl_ala.opintoala1995_koodi,
d_ykk_kl_ala.opintoala1995_koodi,
d_laaker_kl_ala.opintoala1995_koodi)
then 'K' else 'E' end else '-1' end
,toisen_asteen_tutkinto_sama_aiempi_opintoala1995_avain = Case when 'K' in
(lu_aiempi_tutkinto_suoritettu,
amm_aiempi_tutkinto_suoritettu)
then 'toisen_asteen_tutkinto_sama_aiempi_opintoala1995_' + Case when d_kl_ala.opintoala1995_koodi in (
d_lu_kl_ala.opintoala1995_koodi,
d_amm_kl_ala.opintoala1995_koodi)
then 'K' else 'E' end else '-1' end
,lu_sama_aiempi_iscfibroad2013_avain = Case when lu_aiempi_tutkinto_suoritettu = 'K' then 'lu_sama_aiempi_iscfibroad2013_' + Case when d_kl_ala.iscfibroad2013_koodi = d_lu_kl_ala.iscfibroad2013_koodi then 'K' else 'E' end else '-1' end
,amm_sama_aiempi_iscfibroad2013_avain = Case when amm_aiempi_tutkinto_suoritettu = 'K' then 'amm_sama_aiempi_iscfibroad2013_' + Case when d_kl_ala.iscfibroad2013_koodi = d_amm_kl_ala.iscfibroad2013_koodi then 'K' else 'E' end else '-1' end
,opist_sama_aiempi_iscfibroad2013_avain = Case when opist_aiempi_tutkinto_suoritettu = 'K' then 'opist_sama_aiempi_iscfibroad2013_' + Case when d_kl_ala.iscfibroad2013_koodi = d_opist_kl_ala.iscfibroad2013_koodi then 'K' else 'E' end else '-1' end
,ammka_sama_aiempi_iscfibroad2013_avain = Case when ammka_aiempi_tutkinto_suoritettu = 'K' then 'ammka_sama_aiempi_iscfibroad2013_' + Case when d_kl_ala.iscfibroad2013_koodi = d_ammka_kl_ala.iscfibroad2013_koodi then 'K' else 'E' end else '-1' end
,akk_sama_aiempi_iscfibroad2013_avain = Case when akk_aiempi_tutkinto_suoritettu = 'K' then 'akk_sama_aiempi_iscfibroad2013_' + Case when d_kl_ala.iscfibroad2013_koodi = d_akk_kl_ala.iscfibroad2013_koodi then 'K' else 'E' end else '-1' end
,amk_sama_aiempi_iscfibroad2013_avain = Case when amk_aiempi_tutkinto_suoritettu = 'K' then 'amk_sama_aiempi_iscfibroad2013_' + Case when d_kl_ala.iscfibroad2013_koodi = d_amk_kl_ala.iscfibroad2013_koodi then 'K' else 'E' end else '-1' end
,ylamk_sama_aiempi_iscfibroad2013_avain = Case when ylamk_aiempi_tutkinto_suoritettu = 'K' then 'ylamk_sama_aiempi_iscfibroad2013_' + Case when d_kl_ala.iscfibroad2013_koodi = d_ylamk_kl_ala.iscfibroad2013_koodi then 'K' else 'E' end else '-1' end
,ykk_sama_aiempi_iscfibroad2013_avain = Case when ykk_aiempi_tutkinto_suoritettu = 'K' then 'ykk_sama_aiempi_iscfibroad2013_' + Case when d_kl_ala.iscfibroad2013_koodi = d_ykk_kl_ala.iscfibroad2013_koodi then 'K' else 'E' end else '-1' end
,laaker_sama_aiempi_iscfibroad2013_avain = Case when laaker_aiempi_tutkinto_suoritettu = 'K' then 'laaker_sama_aiempi_iscfibroad2013_' + Case when d_kl_ala.iscfibroad2013_koodi = d_laaker_kl_ala.iscfibroad2013_koodi then 'K' else 'E' end else '-1' end
,lis_sama_aiempi_iscfibroad2013_avain = Case when lis_aiempi_tutkinto_suoritettu = 'K' then 'lis_sama_aiempi_iscfibroad2013_' + Case when d_kl_ala.iscfibroad2013_koodi = d_lis_kl_ala.iscfibroad2013_koodi then 'K' else 'E' end else '-1' end
,toht_sama_aiempi_iscfibroad2013_avain = Case when toht_aiempi_tutkinto_suoritettu = 'K' then 'toht_sama_aiempi_iscfibroad2013_' + Case when d_kl_ala.iscfibroad2013_koodi = d_toht_kl_ala.iscfibroad2013_koodi then 'K' else 'E' end else '-1' end
,yliopistotutkinto_sama_aiempi_iscfibroad2013_avain = Case when 'K' in
(akk_aiempi_tutkinto_suoritettu,
ykk_aiempi_tutkinto_suoritettu,
laaker_aiempi_tutkinto_suoritettu)
then 'yliopistotutkinto_sama_aiempi_iscfibroad2013_' + Case when d_kl_ala.iscfibroad2013_koodi in (
d_akk_kl_ala.iscfibroad2013_koodi,
d_ykk_kl_ala.iscfibroad2013_koodi,
d_laaker_kl_ala.iscfibroad2013_koodi)
then 'K' else 'E' end else '-1' end
,yo_jatkotutkinto_sama_aiempi_iscfibroad2013_avain = Case when 'K' in
(lis_aiempi_tutkinto_suoritettu,
toht_aiempi_tutkinto_suoritettu)
then 'yo_jatkotutkinto_sama_aiempi_iscfibroad2013_' + Case when d_kl_ala.iscfibroad2013_koodi in (
d_lis_kl_ala.iscfibroad2013_koodi,
d_toht_kl_ala.iscfibroad2013_koodi)
then 'K' else 'E' end else '-1' end
,opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_iscfibroad2013_avain = Case when 'K' in
(opist_aiempi_tutkinto_suoritettu,
ammka_aiempi_tutkinto_suoritettu)
then 'opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_iscfibroad2013_' + Case when d_kl_ala.iscfibroad2013_koodi in (
d_opist_kl_ala.iscfibroad2013_koodi,
d_ammka_kl_ala.iscfibroad2013_koodi)
then 'K' else 'E' end else '-1' end
,korkeakoulututkinto_sama_aiempi_iscfibroad2013_avain = Case when 'K' in
(amk_aiempi_tutkinto_suoritettu,
ylamk_aiempi_tutkinto_suoritettu,
akk_aiempi_tutkinto_suoritettu,
ykk_aiempi_tutkinto_suoritettu,
laaker_aiempi_tutkinto_suoritettu)
then 'korkeakoulututkinto_sama_aiempi_iscfibroad2013_' + Case when d_kl_ala.iscfibroad2013_koodi in (
d_amk_kl_ala.iscfibroad2013_koodi,
d_ylamk_kl_ala.iscfibroad2013_koodi,
d_akk_kl_ala.iscfibroad2013_koodi,
d_ykk_kl_ala.iscfibroad2013_koodi,
d_laaker_kl_ala.iscfibroad2013_koodi)
then 'K' else 'E' end else '-1' end
,toisen_asteen_tutkinto_sama_aiempi_iscfibroad2013_avain = Case when 'K' in
(lu_aiempi_tutkinto_suoritettu,
amm_aiempi_tutkinto_suoritettu)
then 'toisen_asteen_tutkinto_sama_aiempi_iscfibroad2013_' + Case when d_kl_ala.iscfibroad2013_koodi in (
d_lu_kl_ala.iscfibroad2013_koodi,
d_amm_kl_ala.iscfibroad2013_koodi)
then 'K' else 'E' end else '-1' end
,lu_sama_aiempi_iscfinarrow2013_avain = Case when lu_aiempi_tutkinto_suoritettu = 'K' then 'lu_sama_aiempi_iscfinarrow2013_' + Case when d_kl_ala.iscfinarrow2013_koodi = d_lu_kl_ala.iscfinarrow2013_koodi then 'K' else 'E' end else '-1' end
,amm_sama_aiempi_iscfinarrow2013_avain = Case when amm_aiempi_tutkinto_suoritettu = 'K' then 'amm_sama_aiempi_iscfinarrow2013_' + Case when d_kl_ala.iscfinarrow2013_koodi = d_amm_kl_ala.iscfinarrow2013_koodi then 'K' else 'E' end else '-1' end
,opist_sama_aiempi_iscfinarrow2013_avain = Case when opist_aiempi_tutkinto_suoritettu = 'K' then 'opist_sama_aiempi_iscfinarrow2013_' + Case when d_kl_ala.iscfinarrow2013_koodi = d_opist_kl_ala.iscfinarrow2013_koodi then 'K' else 'E' end else '-1' end
,ammka_sama_aiempi_iscfinarrow2013_avain = Case when ammka_aiempi_tutkinto_suoritettu = 'K' then 'ammka_sama_aiempi_iscfinarrow2013_' + Case when d_kl_ala.iscfinarrow2013_koodi = d_ammka_kl_ala.iscfinarrow2013_koodi then 'K' else 'E' end else '-1' end
,akk_sama_aiempi_iscfinarrow2013_avain = Case when akk_aiempi_tutkinto_suoritettu = 'K' then 'akk_sama_aiempi_iscfinarrow2013_' + Case when d_kl_ala.iscfinarrow2013_koodi = d_akk_kl_ala.iscfinarrow2013_koodi then 'K' else 'E' end else '-1' end
,amk_sama_aiempi_iscfinarrow2013_avain = Case when amk_aiempi_tutkinto_suoritettu = 'K' then 'amk_sama_aiempi_iscfinarrow2013_' + Case when d_kl_ala.iscfinarrow2013_koodi = d_amk_kl_ala.iscfinarrow2013_koodi then 'K' else 'E' end else '-1' end
,ylamk_sama_aiempi_iscfinarrow2013_avain = Case when ylamk_aiempi_tutkinto_suoritettu = 'K' then 'ylamk_sama_aiempi_iscfinarrow2013_' + Case when d_kl_ala.iscfinarrow2013_koodi = d_ylamk_kl_ala.iscfinarrow2013_koodi then 'K' else 'E' end else '-1' end
,ykk_sama_aiempi_iscfinarrow2013_avain = Case when ykk_aiempi_tutkinto_suoritettu = 'K' then 'ykk_sama_aiempi_iscfinarrow2013_' + Case when d_kl_ala.iscfinarrow2013_koodi = d_ykk_kl_ala.iscfinarrow2013_koodi then 'K' else 'E' end else '-1' end
,laaker_sama_aiempi_iscfinarrow2013_avain = Case when laaker_aiempi_tutkinto_suoritettu = 'K' then 'laaker_sama_aiempi_iscfinarrow2013_' + Case when d_kl_ala.iscfinarrow2013_koodi = d_laaker_kl_ala.iscfinarrow2013_koodi then 'K' else 'E' end else '-1' end
,lis_sama_aiempi_iscfinarrow2013_avain = Case when lis_aiempi_tutkinto_suoritettu = 'K' then 'lis_sama_aiempi_iscfinarrow2013_' + Case when d_kl_ala.iscfinarrow2013_koodi = d_lis_kl_ala.iscfinarrow2013_koodi then 'K' else 'E' end else '-1' end
,toht_sama_aiempi_iscfinarrow2013_avain = Case when toht_aiempi_tutkinto_suoritettu = 'K' then 'toht_sama_aiempi_iscfinarrow2013_' + Case when d_kl_ala.iscfinarrow2013_koodi = d_toht_kl_ala.iscfinarrow2013_koodi then 'K' else 'E' end else '-1' end
,yliopistotutkinto_sama_aiempi_iscfinarrow2013_avain = Case when 'K' in
(akk_aiempi_tutkinto_suoritettu,
ykk_aiempi_tutkinto_suoritettu,
laaker_aiempi_tutkinto_suoritettu)
then 'yliopistotutkinto_sama_aiempi_iscfinarrow2013_' + Case when d_kl_ala.iscfinarrow2013_koodi in (
d_akk_kl_ala.iscfinarrow2013_koodi,
d_ykk_kl_ala.iscfinarrow2013_koodi,
d_laaker_kl_ala.iscfinarrow2013_koodi)
then 'K' else 'E' end else '-1' end
,yo_jatkotutkinto_sama_aiempi_iscfinarrow2013_avain = Case when 'K' in
(lis_aiempi_tutkinto_suoritettu,
toht_aiempi_tutkinto_suoritettu)
then 'yo_jatkotutkinto_sama_aiempi_iscfinarrow2013_' + Case when d_kl_ala.iscfinarrow2013_koodi in (
d_lis_kl_ala.iscfinarrow2013_koodi,
d_toht_kl_ala.iscfinarrow2013_koodi)
then 'K' else 'E' end else '-1' end
,opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_iscfinarrow2013_avain = Case when 'K' in
(opist_aiempi_tutkinto_suoritettu,
ammka_aiempi_tutkinto_suoritettu)
then 'opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_iscfinarrow2013_' + Case when d_kl_ala.iscfinarrow2013_koodi in (
d_opist_kl_ala.iscfinarrow2013_koodi,
d_ammka_kl_ala.iscfinarrow2013_koodi)
then 'K' else 'E' end else '-1' end
,korkeakoulututkinto_sama_aiempi_iscfinarrow2013_avain = Case when 'K' in
(amk_aiempi_tutkinto_suoritettu,
ylamk_aiempi_tutkinto_suoritettu,
akk_aiempi_tutkinto_suoritettu,
ykk_aiempi_tutkinto_suoritettu,
laaker_aiempi_tutkinto_suoritettu)
then 'korkeakoulututkinto_sama_aiempi_iscfinarrow2013_' + Case when d_kl_ala.iscfinarrow2013_koodi in (
d_amk_kl_ala.iscfinarrow2013_koodi,
d_ylamk_kl_ala.iscfinarrow2013_koodi,
d_akk_kl_ala.iscfinarrow2013_koodi,
d_ykk_kl_ala.iscfinarrow2013_koodi,
d_laaker_kl_ala.iscfinarrow2013_koodi)
then 'K' else 'E' end else '-1' end
,toisen_asteen_tutkinto_sama_aiempi_iscfinarrow2013_avain = Case when 'K' in
(lu_aiempi_tutkinto_suoritettu,
amm_aiempi_tutkinto_suoritettu)
then 'toisen_asteen_tutkinto_sama_aiempi_iscfinarrow2013_' + Case when d_kl_ala.iscfinarrow2013_koodi in (
d_lu_kl_ala.iscfinarrow2013_koodi,
d_amm_kl_ala.iscfinarrow2013_koodi)
then 'K' else 'E' end else '-1' end
/*
,lu_sama_aiempi_koulutusala2002 = Case when d_kl_ala.koulutusala2002_koodi = d_lu_kl_ala.koulutusala2002_koodi then 'K' else 'E' end
,amm_sama_aiempi_koulutusala2002 = Case when d_kl_ala.koulutusala2002_koodi = d_amm_kl_ala.koulutusala2002_koodi then 'K' else 'E' end
,opist_sama_aiempi_koulutusala2002 = Case when d_kl_ala.koulutusala2002_koodi = d_opist_kl_ala.koulutusala2002_koodi then 'K' else 'E' end
,ammka_sama_aiempi_koulutusala2002 = Case when d_kl_ala.koulutusala2002_koodi = d_ammka_kl_ala.koulutusala2002_koodi then 'K' else 'E' end
,akk_sama_aiempi_koulutusala2002 = Case when d_kl_ala.koulutusala2002_koodi = d_akk_kl_ala.koulutusala2002_koodi then 'K' else 'E' end
,amk_sama_aiempi_koulutusala2002 = Case when d_kl_ala.koulutusala2002_koodi = d_amk_kl_ala.koulutusala2002_koodi then 'K' else 'E' end
,ylamk_sama_aiempi_koulutusala2002 = Case when d_kl_ala.koulutusala2002_koodi = d_ylamk_kl_ala.koulutusala2002_koodi then 'K' else 'E' end
,ykk_sama_aiempi_koulutusala2002 = Case when d_kl_ala.koulutusala2002_koodi = d_ykk_kl_ala.koulutusala2002_koodi then 'K' else 'E' end
,laaker_sama_aiempi_koulutusala2002 = Case when d_kl_ala.koulutusala2002_koodi = d_laaker_kl_ala.koulutusala2002_koodi then 'K' else 'E' end
,lis_sama_aiempi_koulutusala2002 = Case when d_kl_ala.koulutusala2002_koodi = d_lis_kl_ala.koulutusala2002_koodi then 'K' else 'E' end
,toht_sama_aiempi_koulutusala2002 = Case when d_kl_ala.koulutusala2002_koodi = d_toht_kl_ala.koulutusala2002_koodi then 'K' else 'E' end
,yliopistotutkinto_sama_aiempi_koulutusala2002 = Case when d_kl_ala.koulutusala2002_koodi in (
d_akk_kl_ala.koulutusala2002_koodi,
d_ykk_kl_ala.koulutusala2002_koodi,
d_laaker_kl_ala.koulutusala2002_koodi)
then 'K' else 'E' end
,yo_jatkotutkinto_sama_aiempi_koulutusala2002 = Case when d_kl_ala.koulutusala2002_koodi in (
d_lis_kl_ala.koulutusala2002_koodi,
d_toht_kl_ala.koulutusala2002_koodi)
then 'K' else 'E' end
,opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_koulutusala2002 = Case when d_kl_ala.koulutusala2002_koodi in (
d_opist_kl_ala.koulutusala2002_koodi,
d_ammka_kl_ala.koulutusala2002_koodi)
then 'K' else 'E' end
,korkeakoulututkinto_sama_aiempi_koulutusala2002 = Case when d_kl_ala.koulutusala2002_koodi in (
d_amk_kl_ala.koulutusala2002_koodi,
d_ylamk_kl_ala.koulutusala2002_koodi,
d_akk_kl_ala.koulutusala2002_koodi,
d_ykk_kl_ala.koulutusala2002_koodi,
d_laaker_kl_ala.koulutusala2002_koodi)
then 'K' else 'E' end
,toisen_asteen_tutkinto_sama_aiempi_koulutusala2002 = Case when d_kl_ala.koulutusala2002_koodi in (
d_lu_kl_ala.koulutusala2002_koodi,
d_amm_kl_ala.koulutusala2002_koodi)
then 'K' else 'E' end
,lu_sama_aiempi_opintoala2002 = Case when d_kl_ala.opintoala2002_koodi = d_lu_kl_ala.opintoala2002_koodi then 'K' else 'E' end
,amm_sama_aiempi_opintoala2002 = Case when d_kl_ala.opintoala2002_koodi = d_amm_kl_ala.opintoala2002_koodi then 'K' else 'E' end
,opist_sama_aiempi_opintoala2002 = Case when d_kl_ala.opintoala2002_koodi = d_opist_kl_ala.opintoala2002_koodi then 'K' else 'E' end
,ammka_sama_aiempi_opintoala2002 = Case when d_kl_ala.opintoala2002_koodi = d_ammka_kl_ala.opintoala2002_koodi then 'K' else 'E' end
,akk_sama_aiempi_opintoala2002 = Case when d_kl_ala.opintoala2002_koodi = d_akk_kl_ala.opintoala2002_koodi then 'K' else 'E' end
,amk_sama_aiempi_opintoala2002 = Case when d_kl_ala.opintoala2002_koodi = d_amk_kl_ala.opintoala2002_koodi then 'K' else 'E' end
,ylamk_sama_aiempi_opintoala2002 = Case when d_kl_ala.opintoala2002_koodi = d_ylamk_kl_ala.opintoala2002_koodi then 'K' else 'E' end
,ykk_sama_aiempi_opintoala2002 = Case when d_kl_ala.opintoala2002_koodi = d_ykk_kl_ala.opintoala2002_koodi then 'K' else 'E' end
,laaker_sama_aiempi_opintoala2002 = Case when d_kl_ala.opintoala2002_koodi = d_laaker_kl_ala.opintoala2002_koodi then 'K' else 'E' end
,lis_sama_aiempi_opintoala2002 = Case when d_kl_ala.opintoala2002_koodi = d_lis_kl_ala.opintoala2002_koodi then 'K' else 'E' end
,toht_sama_aiempi_opintoala2002 = Case when d_kl_ala.opintoala2002_koodi = d_toht_kl_ala.opintoala2002_koodi then 'K' else 'E' end
,yliopistotutkinto_sama_aiempi_opintoala2002 = Case when d_kl_ala.opintoala2002_koodi in (
d_akk_kl_ala.opintoala2002_koodi,
d_ykk_kl_ala.opintoala2002_koodi,
d_laaker_kl_ala.opintoala2002_koodi)
then 'K' else 'E' end
,yo_jatkotutkinto_sama_aiempi_opintoala2002 = Case when d_kl_ala.opintoala2002_koodi in (
d_lis_kl_ala.opintoala2002_koodi,
d_toht_kl_ala.opintoala2002_koodi)
then 'K' else 'E' end
,opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_opintoala2002 = Case when d_kl_ala.opintoala2002_koodi in (
d_opist_kl_ala.opintoala2002_koodi,
d_ammka_kl_ala.opintoala2002_koodi)
then 'K' else 'E' end
,korkeakoulututkinto_sama_aiempi_opintoala2002 = Case when d_kl_ala.opintoala2002_koodi in (
d_amk_kl_ala.opintoala2002_koodi,
d_ylamk_kl_ala.opintoala2002_koodi,
d_akk_kl_ala.opintoala2002_koodi,
d_ykk_kl_ala.opintoala2002_koodi,
d_laaker_kl_ala.opintoala2002_koodi)
then 'K' else 'E' end
,toisen_asteen_tutkinto_sama_aiempi_opintoala2002 = Case when d_kl_ala.opintoala2002_koodi in (
d_lu_kl_ala.opintoala2002_koodi,
d_amm_kl_ala.opintoala2002_koodi)
then 'K' else 'E' end
,lu_sama_aiempi_opintoala1995 = Case when d_kl_ala.opintoala1995_koodi = d_lu_kl_ala.opintoala1995_koodi then 'K' else 'E' end
,amm_sama_aiempi_opintoala1995 = Case when d_kl_ala.opintoala1995_koodi = d_amm_kl_ala.opintoala1995_koodi then 'K' else 'E' end
,opist_sama_aiempi_opintoala1995 = Case when d_kl_ala.opintoala1995_koodi = d_opist_kl_ala.opintoala1995_koodi then 'K' else 'E' end
,ammka_sama_aiempi_opintoala1995 = Case when d_kl_ala.opintoala1995_koodi = d_ammka_kl_ala.opintoala1995_koodi then 'K' else 'E' end
,akk_sama_aiempi_opintoala1995 = Case when d_kl_ala.opintoala1995_koodi = d_akk_kl_ala.opintoala1995_koodi then 'K' else 'E' end
,amk_sama_aiempi_opintoala1995 = Case when d_kl_ala.opintoala1995_koodi = d_amk_kl_ala.opintoala1995_koodi then 'K' else 'E' end
,ylamk_sama_aiempi_opintoala1995 = Case when d_kl_ala.opintoala1995_koodi = d_ylamk_kl_ala.opintoala1995_koodi then 'K' else 'E' end
,ykk_sama_aiempi_opintoala1995 = Case when d_kl_ala.opintoala1995_koodi = d_ykk_kl_ala.opintoala1995_koodi then 'K' else 'E' end
,laaker_sama_aiempi_opintoala1995 = Case when d_kl_ala.opintoala1995_koodi = d_laaker_kl_ala.opintoala1995_koodi then 'K' else 'E' end
,lis_sama_aiempi_opintoala1995 = Case when d_kl_ala.opintoala1995_koodi = d_lis_kl_ala.opintoala1995_koodi then 'K' else 'E' end
,toht_sama_aiempi_opintoala1995 = Case when d_kl_ala.opintoala1995_koodi = d_toht_kl_ala.opintoala1995_koodi then 'K' else 'E' end
,yliopistotutkinto_sama_aiempi_opintoala1995 = Case when d_kl_ala.opintoala1995_koodi in (
d_akk_kl_ala.opintoala1995_koodi,
d_ykk_kl_ala.opintoala1995_koodi,
d_laaker_kl_ala.opintoala1995_koodi)
then 'K' else 'E' end
,yo_jatkotutkinto_sama_aiempi_opintoala1995 = Case when d_kl_ala.opintoala1995_koodi in (
d_lis_kl_ala.opintoala1995_koodi,
d_toht_kl_ala.opintoala1995_koodi)
then 'K' else 'E' end
,opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_opintoala1995 = Case when d_kl_ala.opintoala1995_koodi in (
d_opist_kl_ala.opintoala1995_koodi,
d_ammka_kl_ala.opintoala1995_koodi)
then 'K' else 'E' end
,korkeakoulututkinto_sama_aiempi_opintoala1995 = Case when d_kl_ala.opintoala1995_koodi in (
d_amk_kl_ala.opintoala1995_koodi,
d_ylamk_kl_ala.opintoala1995_koodi,
d_akk_kl_ala.opintoala1995_koodi,
d_ykk_kl_ala.opintoala1995_koodi,
d_laaker_kl_ala.opintoala1995_koodi)
then 'K' else 'E' end
,toisen_asteen_tutkinto_sama_aiempi_opintoala1995 = Case when d_kl_ala.opintoala1995_koodi in (
d_lu_kl_ala.opintoala1995_koodi,
d_amm_kl_ala.opintoala1995_koodi)
then 'K' else 'E' end
--lisätään vielä iscfibroad2013 (also add the iscfibroad2013 broad-field classification)
,lu_sama_aiempi_iscfibroad2013 = Case when d_kl_ala.iscfibroad2013_koodi = d_lu_kl_ala.iscfibroad2013_koodi then 'K' else 'E' end
,amm_sama_aiempi_iscfibroad2013 = Case when d_kl_ala.iscfibroad2013_koodi = d_amm_kl_ala.iscfibroad2013_koodi then 'K' else 'E' end
,opist_sama_aiempi_iscfibroad2013 = Case when d_kl_ala.iscfibroad2013_koodi = d_opist_kl_ala.iscfibroad2013_koodi then 'K' else 'E' end
,ammka_sama_aiempi_iscfibroad2013 = Case when d_kl_ala.iscfibroad2013_koodi = d_ammka_kl_ala.iscfibroad2013_koodi then 'K' else 'E' end
,akk_sama_aiempi_iscfibroad2013 = Case when d_kl_ala.iscfibroad2013_koodi = d_akk_kl_ala.iscfibroad2013_koodi then 'K' else 'E' end
,amk_sama_aiempi_iscfibroad2013 = Case when d_kl_ala.iscfibroad2013_koodi = d_amk_kl_ala.iscfibroad2013_koodi then 'K' else 'E' end
,ylamk_sama_aiempi_iscfibroad2013 = Case when d_kl_ala.iscfibroad2013_koodi = d_ylamk_kl_ala.iscfibroad2013_koodi then 'K' else 'E' end
,ykk_sama_aiempi_iscfibroad2013 = Case when d_kl_ala.iscfibroad2013_koodi = d_ykk_kl_ala.iscfibroad2013_koodi then 'K' else 'E' end
,laaker_sama_aiempi_iscfibroad2013 = Case when d_kl_ala.iscfibroad2013_koodi = d_laaker_kl_ala.iscfibroad2013_koodi then 'K' else 'E' end
,lis_sama_aiempi_iscfibroad2013 = Case when d_kl_ala.iscfibroad2013_koodi = d_lis_kl_ala.iscfibroad2013_koodi then 'K' else 'E' end
,toht_sama_aiempi_iscfibroad2013 = Case when d_kl_ala.iscfibroad2013_koodi = d_toht_kl_ala.iscfibroad2013_koodi then 'K' else 'E' end
,yliopistotutkinto_sama_aiempi_iscfibroad2013 = Case when d_kl_ala.iscfibroad2013_koodi in (
d_akk_kl_ala.iscfibroad2013_koodi,
d_ykk_kl_ala.iscfibroad2013_koodi,
d_laaker_kl_ala.iscfibroad2013_koodi)
then 'K' else 'E' end
,yo_jatkotutkinto_sama_aiempi_iscfibroad2013 = Case when d_kl_ala.iscfibroad2013_koodi in (
d_lis_kl_ala.iscfibroad2013_koodi,
d_toht_kl_ala.iscfibroad2013_koodi)
then 'K' else 'E' end
,opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_iscfibroad2013 = Case when d_kl_ala.iscfibroad2013_koodi in (
d_opist_kl_ala.iscfibroad2013_koodi,
d_ammka_kl_ala.iscfibroad2013_koodi)
then 'K' else 'E' end
,korkeakoulututkinto_sama_aiempi_iscfibroad2013 = Case when d_kl_ala.iscfibroad2013_koodi in (
d_amk_kl_ala.iscfibroad2013_koodi,
d_ylamk_kl_ala.iscfibroad2013_koodi,
d_akk_kl_ala.iscfibroad2013_koodi,
d_ykk_kl_ala.iscfibroad2013_koodi,
d_laaker_kl_ala.iscfibroad2013_koodi)
then 'K' else 'E' end
,toisen_asteen_tutkinto_sama_aiempi_iscfibroad2013 = Case when d_kl_ala.iscfibroad2013_koodi in (
d_lu_kl_ala.iscfibroad2013_koodi,
d_amm_kl_ala.iscfibroad2013_koodi)
then 'K' else 'E' end
--lisätään vielä iscfinarrow2013 (also add the iscfinarrow2013 narrow-field classification)
,lu_sama_aiempi_iscfinarrow2013 = Case when d_kl_ala.iscfinarrow2013_koodi = d_lu_kl_ala.iscfinarrow2013_koodi then 'K' else 'E' end
,amm_sama_aiempi_iscfinarrow2013 = Case when d_kl_ala.iscfinarrow2013_koodi = d_amm_kl_ala.iscfinarrow2013_koodi then 'K' else 'E' end
,opist_sama_aiempi_iscfinarrow2013 = Case when d_kl_ala.iscfinarrow2013_koodi = d_opist_kl_ala.iscfinarrow2013_koodi then 'K' else 'E' end
,ammka_sama_aiempi_iscfinarrow2013 = Case when d_kl_ala.iscfinarrow2013_koodi = d_ammka_kl_ala.iscfinarrow2013_koodi then 'K' else 'E' end
,akk_sama_aiempi_iscfinarrow2013 = Case when d_kl_ala.iscfinarrow2013_koodi = d_akk_kl_ala.iscfinarrow2013_koodi then 'K' else 'E' end
,amk_sama_aiempi_iscfinarrow2013 = Case when d_kl_ala.iscfinarrow2013_koodi = d_amk_kl_ala.iscfinarrow2013_koodi then 'K' else 'E' end
,ylamk_sama_aiempi_iscfinarrow2013 = Case when d_kl_ala.iscfinarrow2013_koodi = d_ylamk_kl_ala.iscfinarrow2013_koodi then 'K' else 'E' end
,ykk_sama_aiempi_iscfinarrow2013 = Case when d_kl_ala.iscfinarrow2013_koodi = d_ykk_kl_ala.iscfinarrow2013_koodi then 'K' else 'E' end
,laaker_sama_aiempi_iscfinarrow2013 = Case when d_kl_ala.iscfinarrow2013_koodi = d_laaker_kl_ala.iscfinarrow2013_koodi then 'K' else 'E' end
,lis_sama_aiempi_iscfinarrow2013 = Case when d_kl_ala.iscfinarrow2013_koodi = d_lis_kl_ala.iscfinarrow2013_koodi then 'K' else 'E' end
,toht_sama_aiempi_iscfinarrow2013 = Case when d_kl_ala.iscfinarrow2013_koodi = d_toht_kl_ala.iscfinarrow2013_koodi then 'K' else 'E' end
,yliopistotutkinto_sama_aiempi_iscfinarrow2013 = Case when d_kl_ala.iscfinarrow2013_koodi in (
d_akk_kl_ala.iscfinarrow2013_koodi,
d_ykk_kl_ala.iscfinarrow2013_koodi,
d_laaker_kl_ala.iscfinarrow2013_koodi)
then 'K' else 'E' end
,yo_jatkotutkinto_sama_aiempi_iscfinarrow2013 = Case when d_kl_ala.iscfinarrow2013_koodi in (
d_lis_kl_ala.iscfinarrow2013_koodi,
d_toht_kl_ala.iscfinarrow2013_koodi)
then 'K' else 'E' end
,opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_iscfinarrow2013 = Case when d_kl_ala.iscfinarrow2013_koodi in (
d_opist_kl_ala.iscfinarrow2013_koodi,
d_ammka_kl_ala.iscfinarrow2013_koodi)
then 'K' else 'E' end
,korkeakoulututkinto_sama_aiempi_iscfinarrow2013 = Case when d_kl_ala.iscfinarrow2013_koodi in (
d_amk_kl_ala.iscfinarrow2013_koodi,
d_ylamk_kl_ala.iscfinarrow2013_koodi,
d_akk_kl_ala.iscfinarrow2013_koodi,
d_ykk_kl_ala.iscfinarrow2013_koodi,
d_laaker_kl_ala.iscfinarrow2013_koodi)
then 'K' else 'E' end
,toisen_asteen_tutkinto_sama_aiempi_iscfinarrow2013 = Case when d_kl_ala.iscfinarrow2013_koodi in (
d_lu_kl_ala.iscfinarrow2013_koodi,
d_amm_kl_ala.iscfinarrow2013_koodi)
then 'K' else 'E' end
*/
FROM [dbo].[f_tutkinnon_suorittaneiden_aiempi_koulutus_4_7]
LEFT JOIN VipunenTK.dbo.d_koulutusluokitus d_kl_ala
ON d_kl_ala.koulutusluokitus_avain = f_tutkinnon_suorittaneiden_aiempi_koulutus_4_7.koulutusluokitus_avain
LEFT JOIN VipunenTK.dbo.d_koulutusluokitus d_lu_kl_ala
ON d_lu_kl_ala.koulutusluokitus_avain = f_tutkinnon_suorittaneiden_aiempi_koulutus_4_7.lukoulk
LEFT JOIN VipunenTK.dbo.d_koulutusluokitus d_amm_kl_ala
ON d_amm_kl_ala.koulutusluokitus_avain = f_tutkinnon_suorittaneiden_aiempi_koulutus_4_7.ammkoulk
LEFT JOIN VipunenTK.dbo.d_koulutusluokitus d_opist_kl_ala
ON d_opist_kl_ala.koulutusluokitus_avain = f_tutkinnon_suorittaneiden_aiempi_koulutus_4_7.opistkoulk
LEFT JOIN VipunenTK.dbo.d_koulutusluokitus d_ammka_kl_ala
ON d_ammka_kl_ala.koulutusluokitus_avain = f_tutkinnon_suorittaneiden_aiempi_koulutus_4_7.ammkakoulk
LEFT JOIN VipunenTK.dbo.d_koulutusluokitus d_akk_kl_ala
ON d_akk_kl_ala.koulutusluokitus_avain = f_tutkinnon_suorittaneiden_aiempi_koulutus_4_7.akkkoulk
LEFT JOIN VipunenTK.dbo.d_koulutusluokitus d_amk_kl_ala
ON d_amk_kl_ala.koulutusluokitus_avain = f_tutkinnon_suorittaneiden_aiempi_koulutus_4_7.amkkoulk
LEFT JOIN VipunenTK.dbo.d_koulutusluokitus d_ylamk_kl_ala
ON d_ylamk_kl_ala.koulutusluokitus_avain = f_tutkinnon_suorittaneiden_aiempi_koulutus_4_7.ylamkkoulk
LEFT JOIN VipunenTK.dbo.d_koulutusluokitus d_ykk_kl_ala
ON d_ykk_kl_ala.koulutusluokitus_avain = f_tutkinnon_suorittaneiden_aiempi_koulutus_4_7.ykkkoulk
LEFT JOIN VipunenTK.dbo.d_koulutusluokitus d_laaker_kl_ala
ON d_laaker_kl_ala.koulutusluokitus_avain = f_tutkinnon_suorittaneiden_aiempi_koulutus_4_7.laakerkoulk
LEFT JOIN VipunenTK.dbo.d_koulutusluokitus d_lis_kl_ala
ON d_lis_kl_ala.koulutusluokitus_avain = f_tutkinnon_suorittaneiden_aiempi_koulutus_4_7.liskoulk
LEFT JOIN VipunenTK.dbo.d_koulutusluokitus d_toht_kl_ala
ON d_toht_kl_ala.koulutusluokitus_avain = f_tutkinnon_suorittaneiden_aiempi_koulutus_4_7.tohtkoulk
) f
LEFT JOIN VipunenTK.dbo.d_tarkastelujakso d_tj
ON d_tj.tarkastelujakso_koodi = f.tarkastelujakso_koodi
LEFT JOIN VipunenTK.dbo.d_sukupuoli d_sp
ON d_sp.sukupuoli_koodi = f.sukupuoli_koodi
LEFT JOIN VipunenTK.dbo.d_aidinkieli_versio1 d_ak1
ON d_ak1.aidinkieli_versio1_koodi = f.aidinkieli_versio1_koodi
LEFT JOIN VipunenTK.dbo.d_ika
ON d_ika.ika_avain = f.ika_avain
LEFT JOIN VipunenTK.dbo.d_kausi d_alo
ON d_alo.kausi_id = f.aloituskausikoodi
LEFT JOIN VipunenTK.dbo.d_kausi d_suo
ON d_suo.kausi_id = f.suorituskausikoodi
LEFT JOIN VipunenTK.dbo.d_koulutusluokitus d_kl
ON d_kl.koulutusluokitus_avain = f.koulutusluokitus_avain
LEFT JOIN VipunenTK.dbo.d_oppisopimuskoulutus d_osk
ON d_osk.oppisopimuskoulutus_koodi = f.oppisopimuskoulutus_koodi
LEFT JOIN VipunenTK.dbo.d_amm_sopimusjaksot d_amm_sj
ON d_amm_sj.koodi = f.sopimusjaksot
LEFT JOIN VipunenTK.dbo.d_ammatillisen_koulutuksen_koulutuslaji d_akk
ON d_akk.ammatillisen_koulutuksen_koulutuslaji_koodi = f.ammatillisen_koulutuksen_koulutuslaji_koodi
LEFT JOIN VipunenTK.dbo.d_opetussuunnitelmaperusteinen_koulutus_nayttotutkintoon_valmistava_koulutus d_on
ON d_on.opetussuunnitelmaperusteinen_koulutus_nayttotutkintoon_valmistava_koulutus_koodi = f.opetussuunnitelmaperusteinen_koulutus_nayttotutkintoon_valmistava_koulutus_koodi
LEFT JOIN VipunenTK.dbo.d_nuorten_aikuisten_koulutus_amm d_na
ON d_na.nuorten_aikuisten_koulutus_amm_koodi = f.nuorten_aikuisten_koulutus_amm_koodi
LEFT JOIN VipunenTK.dbo.d_koulutusluokitus d_lu_kl
ON d_lu_kl.koulutusluokitus_avain = f.lukoulk
LEFT JOIN VipunenTK.dbo.d_koulutusluokitus d_amm_kl
ON d_amm_kl.koulutusluokitus_avain = f.ammkoulk
LEFT JOIN VipunenTK.dbo.d_koulutusluokitus d_opist_kl
ON d_opist_kl.koulutusluokitus_avain = f.opistkoulk
LEFT JOIN VipunenTK.dbo.d_koulutusluokitus d_ammka_kl
ON d_ammka_kl.koulutusluokitus_avain = f.ammkakoulk
LEFT JOIN VipunenTK.dbo.d_koulutusluokitus d_akk_kl
ON d_akk_kl.koulutusluokitus_avain = f.akkkoulk
LEFT JOIN VipunenTK.dbo.d_koulutusluokitus d_amk_kl
ON d_amk_kl.koulutusluokitus_avain = f.amkkoulk
LEFT JOIN VipunenTK.dbo.d_koulutusluokitus d_ylamk_kl
ON d_ylamk_kl.koulutusluokitus_avain = f.ylamkkoulk
LEFT JOIN VipunenTK.dbo.d_koulutusluokitus d_ykk_kl
ON d_ykk_kl.koulutusluokitus_avain = f.ykkkoulk
LEFT JOIN VipunenTK.dbo.d_koulutusluokitus d_laaker_kl
ON d_laaker_kl.koulutusluokitus_avain = f.laakerkoulk
LEFT JOIN VipunenTK.dbo.d_koulutusluokitus d_lis_kl
ON d_lis_kl.koulutusluokitus_avain = f.liskoulk
LEFT JOIN VipunenTK.dbo.d_koulutusluokitus d_toht_kl
ON d_toht_kl.koulutusluokitus_avain = f.tohtkoulk
LEFT JOIN VipunenTK.dbo.d_nuorten_aikuisten_koulutus_amm d_lu_na
ON d_lu_na.nuorten_aikuisten_koulutus_amm_koodi = f.luaikoul
LEFT JOIN VipunenTK.dbo.d_oppisopimuskoulutus d_amm_osk
ON d_amm_osk.oppisopimuskoulutus_koodi = f.ammoppis
LEFT JOIN VipunenTK.dbo.d_amm_sopimusjaksot d_amm_sj2
ON d_amm_sj2.koodi = f.amm_sopimusjaksot
LEFT JOIN VipunenTK.dbo.d_ammatillisen_koulutuksen_koulutuslaji d_amm_akk
ON d_amm_akk.ammatillisen_koulutuksen_koulutuslaji_koodi = f.ammtutklaja
LEFT JOIN VipunenTK.dbo.d_opetussuunnitelmaperusteinen_koulutus_nayttotutkintoon_valmistava_koulutus d_amm_on
ON d_amm_on.opetussuunnitelmaperusteinen_koulutus_nayttotutkintoon_valmistava_koulutus_koodi = f.ammtutktav
LEFT JOIN VipunenTK.dbo.d_nuorten_aikuisten_koulutus_amm d_amk_na
ON d_amk_na.nuorten_aikuisten_koulutus_amm_koodi = f.amkaikoul
LEFT JOIN VipunenTK.dbo.d_ammatillinen_koulutus_luokittelu d_ammkl
ON d_ammkl.ammatillisen_koulutuksen_luokittelu_avain = f.ammatillinen_koulutus_ryhma_avain
LEFT JOIN VipunenTK.dbo.d_tutkintojen_maara d_amm_tm
ON d_amm_tm.[tutkintojen_maara_avain] = f.tutkintojen_maara_amm_avain
LEFT JOIN VipunenTK.dbo.d_tutkintojen_maara d_amk_tm
ON d_amk_tm.[tutkintojen_maara_avain] = f.tutkintojen_maara_amk_avain
LEFT JOIN VipunenTK.dbo.d_tutkintojen_maara d_akk_tm
ON d_akk_tm.[tutkintojen_maara_avain] = f.tutkintojen_maara_akk_avain
LEFT JOIN VipunenTK.dbo.d_tutkintojen_maara d_ykk_tm
ON d_ykk_tm.[tutkintojen_maara_avain] = f.tutkintojen_maara_ykk_avain
LEFT JOIN VipunenTK.dbo.d_koulutuksesta_kulunut_aika d_lu_ika
ON d_lu_ika.koulutuksesta_kulunut_aika_koodi = cast(f.lu_ika as nvarchar(20))
LEFT JOIN VipunenTK.dbo.d_koulutuksesta_kulunut_aika d_amm_ika
ON d_amm_ika.koulutuksesta_kulunut_aika_koodi = cast(f.amm_ika as nvarchar(20))
LEFT JOIN VipunenTK.dbo.d_koulutuksesta_kulunut_aika d_opist_ika
ON d_opist_ika.koulutuksesta_kulunut_aika_koodi = cast(f.opist_ika as nvarchar(20))
LEFT JOIN VipunenTK.dbo.d_koulutuksesta_kulunut_aika d_ammka_ika
ON d_ammka_ika.koulutuksesta_kulunut_aika_koodi = cast(f.ammka_ika as nvarchar(20))
LEFT JOIN VipunenTK.dbo.d_koulutuksesta_kulunut_aika d_amk_ika
ON d_amk_ika.koulutuksesta_kulunut_aika_koodi = cast(f.amk_ika as nvarchar(20))
LEFT JOIN VipunenTK.dbo.d_koulutuksesta_kulunut_aika d_akk_ika
ON d_akk_ika.koulutuksesta_kulunut_aika_koodi = cast(f.akk_ika as nvarchar(20))
LEFT JOIN VipunenTK.dbo.d_koulutuksesta_kulunut_aika d_ylamk_ika
ON d_ylamk_ika.koulutuksesta_kulunut_aika_koodi = cast(f.ylamk_ika as nvarchar(20))
LEFT JOIN VipunenTK.dbo.d_koulutuksesta_kulunut_aika d_ykk_ika
ON d_ykk_ika.koulutuksesta_kulunut_aika_koodi = cast(f.ykk_ika as nvarchar(20))
LEFT JOIN VipunenTK.dbo.d_koulutuksesta_kulunut_aika d_laaker_ika
ON d_laaker_ika.koulutuksesta_kulunut_aika_koodi = cast(f.laaker_ika as nvarchar(20))
LEFT JOIN VipunenTK.dbo.d_koulutuksesta_kulunut_aika d_lis_ika
ON d_lis_ika.koulutuksesta_kulunut_aika_koodi = cast(f.lis_ika as nvarchar(20))
LEFT JOIN VipunenTK.dbo.d_koulutuksesta_kulunut_aika d_toht_ika
ON d_toht_ika.koulutuksesta_kulunut_aika_koodi = cast(f.toht_ika as nvarchar(20))
LEFT JOIN VipunenTK.dbo.d_tutkinto_suoritettu d_lu_ts
ON d_lu_ts.[tutkinto_suoritettu_avain] = f.tutkinto_suoritettu_lu_avain
LEFT JOIN VipunenTK.dbo.d_tutkinto_suoritettu d_amm_ts
ON d_amm_ts.[tutkinto_suoritettu_avain] = f.tutkinto_suoritettu_amm_avain
LEFT JOIN VipunenTK.dbo.d_tutkinto_suoritettu d_opist_ts
ON d_opist_ts.[tutkinto_suoritettu_avain] = f.tutkinto_suoritettu_opisto_avain
LEFT JOIN VipunenTK.dbo.d_tutkinto_suoritettu d_ammka_ts
ON d_ammka_ts.[tutkinto_suoritettu_avain] = f.tutkinto_suoritettu_ammka_avain
LEFT JOIN VipunenTK.dbo.d_tutkinto_suoritettu d_amk_ts
ON d_amk_ts.[tutkinto_suoritettu_avain] = f.tutkinto_suoritettu_amk_avain
LEFT JOIN VipunenTK.dbo.d_tutkinto_suoritettu d_akk_ts
ON d_akk_ts.[tutkinto_suoritettu_avain] = f.tutkinto_suoritettu_akk_avain
LEFT JOIN VipunenTK.dbo.d_tutkinto_suoritettu d_ylamk_ts
ON d_ylamk_ts.[tutkinto_suoritettu_avain] = f.tutkinto_suoritettu_ylamk_avain
LEFT JOIN VipunenTK.dbo.d_tutkinto_suoritettu d_ykk_ts
ON d_ykk_ts.[tutkinto_suoritettu_avain] = f.tutkinto_suoritettu_ykk_avain
LEFT JOIN VipunenTK.dbo.d_tutkinto_suoritettu d_laaker_ts
ON d_laaker_ts.[tutkinto_suoritettu_avain] = f.tutkinto_suoritettu_laaker_avain
LEFT JOIN VipunenTK.dbo.d_tutkinto_suoritettu d_lis_ts
ON d_lis_ts.[tutkinto_suoritettu_avain] = f.tutkinto_suoritettu_lis_avain
LEFT JOIN VipunenTK.dbo.d_tutkinto_suoritettu d_toht_ts
ON d_toht_ts.[tutkinto_suoritettu_avain] = f.tutkinto_suoritettu_toht_avain
LEFT JOIN VipunenTK.dbo.d_tutkinto_suoritettu d_yo_ts
ON d_yo_ts.[tutkinto_suoritettu_avain] = 'AIK_YO_' + f.yliopistotutkinto_aiempi_suoritettu
LEFT JOIN VipunenTK.dbo.d_tutkinto_suoritettu d_yojatko_ts
ON d_yojatko_ts.[tutkinto_suoritettu_avain] = 'AIK_YOJATKO_' + f.yo_jatkotutkinto_aiempi_suoritettu
LEFT JOIN VipunenTK.dbo.d_tutkinto_suoritettu d_opammka_ts
ON d_opammka_ts.[tutkinto_suoritettu_avain] = 'AIK_OP_AMMKA_' + f.opisto_tai_ammatillisen_korkea_asteen_tutkinto_aiempi_suoritettu
LEFT JOIN VipunenTK.dbo.d_tutkinto_suoritettu d_kk_ts
ON d_kk_ts.[tutkinto_suoritettu_avain] = 'AIK_KK_' + f.korkeakoulututkinto_aiempi_suoritettu
LEFT JOIN VipunenTK.dbo.d_tutkinto_suoritettu d_aste2_ts
ON d_aste2_ts.[tutkinto_suoritettu_avain] = 'AIK_ASTE2_' + f.toisen_asteen_tutkinto_aiempi_suoritettu
LEFT JOIN VipunenTK.dbo.d_tutkinto_suoritettu d_amm_r1_ts
ON d_amm_r1_ts.[tutkinto_suoritettu_avain] = 'AIK_AMM_R1_' + f.ammatillisen_tutkinto_aiempi_suoritettu_ryhma1
LEFT JOIN VipunenTK.dbo.d_tutkinto_suoritettu d_amm_r2_ts
ON d_amm_r2_ts.[tutkinto_suoritettu_avain] = 'AIK_AMM_R2_' + f.ammatillisen_tutkinto_aiempi_suoritettu_ryhma2
LEFT JOIN VipunenTK.dbo.d_tutkinto_suoritettu d_amm_r3_ts
ON d_amm_r3_ts.[tutkinto_suoritettu_avain] = 'AIK_AMM_R3_' + f.ammatillisen_tutkinto_aiempi_suoritettu_ryhma3
-- Kuvaukset koulutusalan pysyvyydelle (English: description lookups for the "same field of education as prior degree" indicators)
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala lu_sama_aiempi_koulutusala2002 ON lu_sama_aiempi_koulutusala2002.sama_koulutusala_avain = f.lu_sama_aiempi_koulutusala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala amm_sama_aiempi_koulutusala2002 ON amm_sama_aiempi_koulutusala2002.sama_koulutusala_avain = f.amm_sama_aiempi_koulutusala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala opist_sama_aiempi_koulutusala2002 ON opist_sama_aiempi_koulutusala2002.sama_koulutusala_avain = f.opist_sama_aiempi_koulutusala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ammka_sama_aiempi_koulutusala2002 ON ammka_sama_aiempi_koulutusala2002.sama_koulutusala_avain = f.ammka_sama_aiempi_koulutusala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala akk_sama_aiempi_koulutusala2002 ON akk_sama_aiempi_koulutusala2002.sama_koulutusala_avain = f.akk_sama_aiempi_koulutusala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala amk_sama_aiempi_koulutusala2002 ON amk_sama_aiempi_koulutusala2002.sama_koulutusala_avain = f.amk_sama_aiempi_koulutusala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ylamk_sama_aiempi_koulutusala2002 ON ylamk_sama_aiempi_koulutusala2002.sama_koulutusala_avain = f.ylamk_sama_aiempi_koulutusala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ykk_sama_aiempi_koulutusala2002 ON ykk_sama_aiempi_koulutusala2002.sama_koulutusala_avain = f.ykk_sama_aiempi_koulutusala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala laaker_sama_aiempi_koulutusala2002 ON laaker_sama_aiempi_koulutusala2002.sama_koulutusala_avain = f.laaker_sama_aiempi_koulutusala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala lis_sama_aiempi_koulutusala2002 ON lis_sama_aiempi_koulutusala2002.sama_koulutusala_avain = f.lis_sama_aiempi_koulutusala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala toht_sama_aiempi_koulutusala2002 ON toht_sama_aiempi_koulutusala2002.sama_koulutusala_avain = f.toht_sama_aiempi_koulutusala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala yliopistotutkinto_sama_aiempi_koulutusala2002 ON yliopistotutkinto_sama_aiempi_koulutusala2002.sama_koulutusala_avain = f.yliopistotutkinto_sama_aiempi_koulutusala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala yo_jatkotutkinto_sama_aiempi_koulutusala2002 ON yo_jatkotutkinto_sama_aiempi_koulutusala2002.sama_koulutusala_avain = f.yo_jatkotutkinto_sama_aiempi_koulutusala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_koulutusala2002 ON opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_koulutusala2002.sama_koulutusala_avain = f.opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_koulutusala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala korkeakoulututkinto_sama_aiempi_koulutusala2002 ON korkeakoulututkinto_sama_aiempi_koulutusala2002.sama_koulutusala_avain = f.korkeakoulututkinto_sama_aiempi_koulutusala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala toisen_asteen_tutkinto_sama_aiempi_koulutusala2002 ON toisen_asteen_tutkinto_sama_aiempi_koulutusala2002.sama_koulutusala_avain = f.toisen_asteen_tutkinto_sama_aiempi_koulutusala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala lu_sama_aiempi_opintoala2002 ON lu_sama_aiempi_opintoala2002.sama_koulutusala_avain = f.lu_sama_aiempi_opintoala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala amm_sama_aiempi_opintoala2002 ON amm_sama_aiempi_opintoala2002.sama_koulutusala_avain = f.amm_sama_aiempi_opintoala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala opist_sama_aiempi_opintoala2002 ON opist_sama_aiempi_opintoala2002.sama_koulutusala_avain = f.opist_sama_aiempi_opintoala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ammka_sama_aiempi_opintoala2002 ON ammka_sama_aiempi_opintoala2002.sama_koulutusala_avain = f.ammka_sama_aiempi_opintoala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala akk_sama_aiempi_opintoala2002 ON akk_sama_aiempi_opintoala2002.sama_koulutusala_avain = f.akk_sama_aiempi_opintoala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala amk_sama_aiempi_opintoala2002 ON amk_sama_aiempi_opintoala2002.sama_koulutusala_avain = f.amk_sama_aiempi_opintoala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ylamk_sama_aiempi_opintoala2002 ON ylamk_sama_aiempi_opintoala2002.sama_koulutusala_avain = f.ylamk_sama_aiempi_opintoala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ykk_sama_aiempi_opintoala2002 ON ykk_sama_aiempi_opintoala2002.sama_koulutusala_avain = f.ykk_sama_aiempi_opintoala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala laaker_sama_aiempi_opintoala2002 ON laaker_sama_aiempi_opintoala2002.sama_koulutusala_avain = f.laaker_sama_aiempi_opintoala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala lis_sama_aiempi_opintoala2002 ON lis_sama_aiempi_opintoala2002.sama_koulutusala_avain = f.lis_sama_aiempi_opintoala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala toht_sama_aiempi_opintoala2002 ON toht_sama_aiempi_opintoala2002.sama_koulutusala_avain = f.toht_sama_aiempi_opintoala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala yliopistotutkinto_sama_aiempi_opintoala2002 ON yliopistotutkinto_sama_aiempi_opintoala2002.sama_koulutusala_avain = f.yliopistotutkinto_sama_aiempi_opintoala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala yo_jatkotutkinto_sama_aiempi_opintoala2002 ON yo_jatkotutkinto_sama_aiempi_opintoala2002.sama_koulutusala_avain = f.yo_jatkotutkinto_sama_aiempi_opintoala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_opintoala2002 ON opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_opintoala2002.sama_koulutusala_avain = f.opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_opintoala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala korkeakoulututkinto_sama_aiempi_opintoala2002 ON korkeakoulututkinto_sama_aiempi_opintoala2002.sama_koulutusala_avain = f.korkeakoulututkinto_sama_aiempi_opintoala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala toisen_asteen_tutkinto_sama_aiempi_opintoala2002 ON toisen_asteen_tutkinto_sama_aiempi_opintoala2002.sama_koulutusala_avain = f.toisen_asteen_tutkinto_sama_aiempi_opintoala2002_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala lu_sama_aiempi_opintoala1995 ON lu_sama_aiempi_opintoala1995.sama_koulutusala_avain = f.lu_sama_aiempi_opintoala1995_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala amm_sama_aiempi_opintoala1995 ON amm_sama_aiempi_opintoala1995.sama_koulutusala_avain = f.amm_sama_aiempi_opintoala1995_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala opist_sama_aiempi_opintoala1995 ON opist_sama_aiempi_opintoala1995.sama_koulutusala_avain = f.opist_sama_aiempi_opintoala1995_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ammka_sama_aiempi_opintoala1995 ON ammka_sama_aiempi_opintoala1995.sama_koulutusala_avain = f.ammka_sama_aiempi_opintoala1995_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala akk_sama_aiempi_opintoala1995 ON akk_sama_aiempi_opintoala1995.sama_koulutusala_avain = f.akk_sama_aiempi_opintoala1995_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala amk_sama_aiempi_opintoala1995 ON amk_sama_aiempi_opintoala1995.sama_koulutusala_avain = f.amk_sama_aiempi_opintoala1995_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ylamk_sama_aiempi_opintoala1995 ON ylamk_sama_aiempi_opintoala1995.sama_koulutusala_avain = f.ylamk_sama_aiempi_opintoala1995_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ykk_sama_aiempi_opintoala1995 ON ykk_sama_aiempi_opintoala1995.sama_koulutusala_avain = f.ykk_sama_aiempi_opintoala1995_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala laaker_sama_aiempi_opintoala1995 ON laaker_sama_aiempi_opintoala1995.sama_koulutusala_avain = f.laaker_sama_aiempi_opintoala1995_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala lis_sama_aiempi_opintoala1995 ON lis_sama_aiempi_opintoala1995.sama_koulutusala_avain = f.lis_sama_aiempi_opintoala1995_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala toht_sama_aiempi_opintoala1995 ON toht_sama_aiempi_opintoala1995.sama_koulutusala_avain = f.toht_sama_aiempi_opintoala1995_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala yliopistotutkinto_sama_aiempi_opintoala1995 ON yliopistotutkinto_sama_aiempi_opintoala1995.sama_koulutusala_avain = f.yliopistotutkinto_sama_aiempi_opintoala1995_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala yo_jatkotutkinto_sama_aiempi_opintoala1995 ON yo_jatkotutkinto_sama_aiempi_opintoala1995.sama_koulutusala_avain = f.yo_jatkotutkinto_sama_aiempi_opintoala1995_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_opintoala1995 ON opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_opintoala1995.sama_koulutusala_avain = f.opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_opintoala1995_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala korkeakoulututkinto_sama_aiempi_opintoala1995 ON korkeakoulututkinto_sama_aiempi_opintoala1995.sama_koulutusala_avain = f.korkeakoulututkinto_sama_aiempi_opintoala1995_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala toisen_asteen_tutkinto_sama_aiempi_opintoala1995 ON toisen_asteen_tutkinto_sama_aiempi_opintoala1995.sama_koulutusala_avain = f.toisen_asteen_tutkinto_sama_aiempi_opintoala1995_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala lu_sama_aiempi_iscfibroad2013 ON lu_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = f.lu_sama_aiempi_iscfibroad2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala amm_sama_aiempi_iscfibroad2013 ON amm_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = f.amm_sama_aiempi_iscfibroad2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala opist_sama_aiempi_iscfibroad2013 ON opist_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = f.opist_sama_aiempi_iscfibroad2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ammka_sama_aiempi_iscfibroad2013 ON ammka_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = f.ammka_sama_aiempi_iscfibroad2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala akk_sama_aiempi_iscfibroad2013 ON akk_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = f.akk_sama_aiempi_iscfibroad2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala amk_sama_aiempi_iscfibroad2013 ON amk_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = f.amk_sama_aiempi_iscfibroad2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ylamk_sama_aiempi_iscfibroad2013 ON ylamk_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = f.ylamk_sama_aiempi_iscfibroad2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ykk_sama_aiempi_iscfibroad2013 ON ykk_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = f.ykk_sama_aiempi_iscfibroad2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala laaker_sama_aiempi_iscfibroad2013 ON laaker_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = f.laaker_sama_aiempi_iscfibroad2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala lis_sama_aiempi_iscfibroad2013 ON lis_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = f.lis_sama_aiempi_iscfibroad2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala toht_sama_aiempi_iscfibroad2013 ON toht_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = f.toht_sama_aiempi_iscfibroad2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala yliopistotutkinto_sama_aiempi_iscfibroad2013 ON yliopistotutkinto_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = f.yliopistotutkinto_sama_aiempi_iscfibroad2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala yo_jatkotutkinto_sama_aiempi_iscfibroad2013 ON yo_jatkotutkinto_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = f.yo_jatkotutkinto_sama_aiempi_iscfibroad2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_iscfibroad2013 ON opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = f.opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_iscfibroad2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala korkeakoulututkinto_sama_aiempi_iscfibroad2013 ON korkeakoulututkinto_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = f.korkeakoulututkinto_sama_aiempi_iscfibroad2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala toisen_asteen_tutkinto_sama_aiempi_iscfibroad2013 ON toisen_asteen_tutkinto_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = f.toisen_asteen_tutkinto_sama_aiempi_iscfibroad2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala lu_sama_aiempi_iscfinarrow2013 ON lu_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = f.lu_sama_aiempi_iscfinarrow2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala amm_sama_aiempi_iscfinarrow2013 ON amm_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = f.amm_sama_aiempi_iscfinarrow2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala opist_sama_aiempi_iscfinarrow2013 ON opist_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = f.opist_sama_aiempi_iscfinarrow2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ammka_sama_aiempi_iscfinarrow2013 ON ammka_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = f.ammka_sama_aiempi_iscfinarrow2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala akk_sama_aiempi_iscfinarrow2013 ON akk_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = f.akk_sama_aiempi_iscfinarrow2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala amk_sama_aiempi_iscfinarrow2013 ON amk_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = f.amk_sama_aiempi_iscfinarrow2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ylamk_sama_aiempi_iscfinarrow2013 ON ylamk_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = f.ylamk_sama_aiempi_iscfinarrow2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ykk_sama_aiempi_iscfinarrow2013 ON ykk_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = f.ykk_sama_aiempi_iscfinarrow2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala laaker_sama_aiempi_iscfinarrow2013 ON laaker_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = f.laaker_sama_aiempi_iscfinarrow2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala lis_sama_aiempi_iscfinarrow2013 ON lis_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = f.lis_sama_aiempi_iscfinarrow2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala toht_sama_aiempi_iscfinarrow2013 ON toht_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = f.toht_sama_aiempi_iscfinarrow2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala yliopistotutkinto_sama_aiempi_iscfinarrow2013 ON yliopistotutkinto_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = f.yliopistotutkinto_sama_aiempi_iscfinarrow2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala yo_jatkotutkinto_sama_aiempi_iscfinarrow2013 ON yo_jatkotutkinto_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = f.yo_jatkotutkinto_sama_aiempi_iscfinarrow2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_iscfinarrow2013 ON opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = f.opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_iscfinarrow2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala korkeakoulututkinto_sama_aiempi_iscfinarrow2013 ON korkeakoulututkinto_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = f.korkeakoulututkinto_sama_aiempi_iscfinarrow2013_avain
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala toisen_asteen_tutkinto_sama_aiempi_iscfinarrow2013 ON toisen_asteen_tutkinto_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = f.toisen_asteen_tutkinto_sama_aiempi_iscfinarrow2013_avain
/*
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala lu_sama_aiempi_koulutusala2002 ON lu_sama_aiempi_koulutusala2002.sama_koulutusala_avain = 'lu_sama_aiempi_koulutusala2002_'+f.lu_sama_aiempi_koulutusala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala amm_sama_aiempi_koulutusala2002 ON amm_sama_aiempi_koulutusala2002.sama_koulutusala_avain = 'amm_sama_aiempi_koulutusala2002_'+f.amm_sama_aiempi_koulutusala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala opist_sama_aiempi_koulutusala2002 ON opist_sama_aiempi_koulutusala2002.sama_koulutusala_avain = 'opist_sama_aiempi_koulutusala2002_'+f.opist_sama_aiempi_koulutusala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ammka_sama_aiempi_koulutusala2002 ON ammka_sama_aiempi_koulutusala2002.sama_koulutusala_avain = 'ammka_sama_aiempi_koulutusala2002_'+f.ammka_sama_aiempi_koulutusala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala akk_sama_aiempi_koulutusala2002 ON akk_sama_aiempi_koulutusala2002.sama_koulutusala_avain = 'akk_sama_aiempi_koulutusala2002_'+f.akk_sama_aiempi_koulutusala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala amk_sama_aiempi_koulutusala2002 ON amk_sama_aiempi_koulutusala2002.sama_koulutusala_avain = 'amk_sama_aiempi_koulutusala2002_'+f.amk_sama_aiempi_koulutusala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ylamk_sama_aiempi_koulutusala2002 ON ylamk_sama_aiempi_koulutusala2002.sama_koulutusala_avain = 'ylamk_sama_aiempi_koulutusala2002_'+f.ylamk_sama_aiempi_koulutusala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ykk_sama_aiempi_koulutusala2002 ON ykk_sama_aiempi_koulutusala2002.sama_koulutusala_avain = 'ykk_sama_aiempi_koulutusala2002_'+f.ykk_sama_aiempi_koulutusala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala laaker_sama_aiempi_koulutusala2002 ON laaker_sama_aiempi_koulutusala2002.sama_koulutusala_avain = 'laaker_sama_aiempi_koulutusala2002_'+f.laaker_sama_aiempi_koulutusala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala lis_sama_aiempi_koulutusala2002 ON lis_sama_aiempi_koulutusala2002.sama_koulutusala_avain = 'lis_sama_aiempi_koulutusala2002_'+f.lis_sama_aiempi_koulutusala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala toht_sama_aiempi_koulutusala2002 ON toht_sama_aiempi_koulutusala2002.sama_koulutusala_avain = 'toht_sama_aiempi_koulutusala2002_'+f.toht_sama_aiempi_koulutusala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala yliopistotutkinto_sama_aiempi_koulutusala2002 ON yliopistotutkinto_sama_aiempi_koulutusala2002.sama_koulutusala_avain = 'yliopistotutkinto_sama_aiempi_koulutusala2002_'+f.yliopistotutkinto_sama_aiempi_koulutusala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala yo_jatkotutkinto_sama_aiempi_koulutusala2002 ON yo_jatkotutkinto_sama_aiempi_koulutusala2002.sama_koulutusala_avain = 'yo_jatkotutkinto_sama_aiempi_koulutusala2002_'+f.yo_jatkotutkinto_sama_aiempi_koulutusala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_koulutusala2002 ON opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_koulutusala2002.sama_koulutusala_avain = 'opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_koulutusala2002_'+f.opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_koulutusala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala korkeakoulututkinto_sama_aiempi_koulutusala2002 ON korkeakoulututkinto_sama_aiempi_koulutusala2002.sama_koulutusala_avain = 'korkeakoulututkinto_sama_aiempi_koulutusala2002_'+f.korkeakoulututkinto_sama_aiempi_koulutusala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala toisen_asteen_tutkinto_sama_aiempi_koulutusala2002 ON toisen_asteen_tutkinto_sama_aiempi_koulutusala2002.sama_koulutusala_avain = 'toisen_asteen_tutkinto_sama_aiempi_koulutusala2002_'+f.toisen_asteen_tutkinto_sama_aiempi_koulutusala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala lu_sama_aiempi_opintoala2002 ON lu_sama_aiempi_opintoala2002.sama_koulutusala_avain = 'lu_sama_aiempi_opintoala2002_'+f.lu_sama_aiempi_opintoala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala amm_sama_aiempi_opintoala2002 ON amm_sama_aiempi_opintoala2002.sama_koulutusala_avain = 'amm_sama_aiempi_opintoala2002_'+f.amm_sama_aiempi_opintoala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala opist_sama_aiempi_opintoala2002 ON opist_sama_aiempi_opintoala2002.sama_koulutusala_avain = 'opist_sama_aiempi_opintoala2002_'+f.opist_sama_aiempi_opintoala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ammka_sama_aiempi_opintoala2002 ON ammka_sama_aiempi_opintoala2002.sama_koulutusala_avain = 'ammka_sama_aiempi_opintoala2002_'+f.ammka_sama_aiempi_opintoala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala akk_sama_aiempi_opintoala2002 ON akk_sama_aiempi_opintoala2002.sama_koulutusala_avain = 'akk_sama_aiempi_opintoala2002_'+f.akk_sama_aiempi_opintoala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala amk_sama_aiempi_opintoala2002 ON amk_sama_aiempi_opintoala2002.sama_koulutusala_avain = 'amk_sama_aiempi_opintoala2002_'+f.amk_sama_aiempi_opintoala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ylamk_sama_aiempi_opintoala2002 ON ylamk_sama_aiempi_opintoala2002.sama_koulutusala_avain = 'ylamk_sama_aiempi_opintoala2002_'+f.ylamk_sama_aiempi_opintoala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ykk_sama_aiempi_opintoala2002 ON ykk_sama_aiempi_opintoala2002.sama_koulutusala_avain = 'ykk_sama_aiempi_opintoala2002_'+f.ykk_sama_aiempi_opintoala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala laaker_sama_aiempi_opintoala2002 ON laaker_sama_aiempi_opintoala2002.sama_koulutusala_avain = 'laaker_sama_aiempi_opintoala2002_'+f.laaker_sama_aiempi_opintoala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala lis_sama_aiempi_opintoala2002 ON lis_sama_aiempi_opintoala2002.sama_koulutusala_avain = 'lis_sama_aiempi_opintoala2002_'+f.lis_sama_aiempi_opintoala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala toht_sama_aiempi_opintoala2002 ON toht_sama_aiempi_opintoala2002.sama_koulutusala_avain = 'toht_sama_aiempi_opintoala2002_'+f.toht_sama_aiempi_opintoala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala yliopistotutkinto_sama_aiempi_opintoala2002 ON yliopistotutkinto_sama_aiempi_opintoala2002.sama_koulutusala_avain = 'yliopistotutkinto_sama_aiempi_opintoala2002_'+f.yliopistotutkinto_sama_aiempi_opintoala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala yo_jatkotutkinto_sama_aiempi_opintoala2002 ON yo_jatkotutkinto_sama_aiempi_opintoala2002.sama_koulutusala_avain = 'yo_jatkotutkinto_sama_aiempi_opintoala2002_'+f.yo_jatkotutkinto_sama_aiempi_opintoala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_opintoala2002 ON opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_opintoala2002.sama_koulutusala_avain = 'opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_opintoala2002_'+f.opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_opintoala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala korkeakoulututkinto_sama_aiempi_opintoala2002 ON korkeakoulututkinto_sama_aiempi_opintoala2002.sama_koulutusala_avain = 'korkeakoulututkinto_sama_aiempi_opintoala2002_'+f.korkeakoulututkinto_sama_aiempi_opintoala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala toisen_asteen_tutkinto_sama_aiempi_opintoala2002 ON toisen_asteen_tutkinto_sama_aiempi_opintoala2002.sama_koulutusala_avain = 'toisen_asteen_tutkinto_sama_aiempi_opintoala2002_'+f.toisen_asteen_tutkinto_sama_aiempi_opintoala2002
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala lu_sama_aiempi_opintoala1995 ON lu_sama_aiempi_opintoala1995.sama_koulutusala_avain = 'lu_sama_aiempi_opintoala1995_'+f.lu_sama_aiempi_opintoala1995
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala amm_sama_aiempi_opintoala1995 ON amm_sama_aiempi_opintoala1995.sama_koulutusala_avain = 'amm_sama_aiempi_opintoala1995_'+f.amm_sama_aiempi_opintoala1995
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala opist_sama_aiempi_opintoala1995 ON opist_sama_aiempi_opintoala1995.sama_koulutusala_avain = 'opist_sama_aiempi_opintoala1995_'+f.opist_sama_aiempi_opintoala1995
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ammka_sama_aiempi_opintoala1995 ON ammka_sama_aiempi_opintoala1995.sama_koulutusala_avain = 'ammka_sama_aiempi_opintoala1995_'+f.ammka_sama_aiempi_opintoala1995
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala akk_sama_aiempi_opintoala1995 ON akk_sama_aiempi_opintoala1995.sama_koulutusala_avain = 'akk_sama_aiempi_opintoala1995_'+f.akk_sama_aiempi_opintoala1995
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala amk_sama_aiempi_opintoala1995 ON amk_sama_aiempi_opintoala1995.sama_koulutusala_avain = 'amk_sama_aiempi_opintoala1995_'+f.amk_sama_aiempi_opintoala1995
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ylamk_sama_aiempi_opintoala1995 ON ylamk_sama_aiempi_opintoala1995.sama_koulutusala_avain = 'ylamk_sama_aiempi_opintoala1995_'+f.ylamk_sama_aiempi_opintoala1995
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ykk_sama_aiempi_opintoala1995 ON ykk_sama_aiempi_opintoala1995.sama_koulutusala_avain = 'ykk_sama_aiempi_opintoala1995_'+f.ykk_sama_aiempi_opintoala1995
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala laaker_sama_aiempi_opintoala1995 ON laaker_sama_aiempi_opintoala1995.sama_koulutusala_avain = 'laaker_sama_aiempi_opintoala1995_'+f.laaker_sama_aiempi_opintoala1995
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala lis_sama_aiempi_opintoala1995 ON lis_sama_aiempi_opintoala1995.sama_koulutusala_avain = 'lis_sama_aiempi_opintoala1995_'+f.lis_sama_aiempi_opintoala1995
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala toht_sama_aiempi_opintoala1995 ON toht_sama_aiempi_opintoala1995.sama_koulutusala_avain = 'toht_sama_aiempi_opintoala1995_'+f.toht_sama_aiempi_opintoala1995
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala yliopistotutkinto_sama_aiempi_opintoala1995 ON yliopistotutkinto_sama_aiempi_opintoala1995.sama_koulutusala_avain = 'yliopistotutkinto_sama_aiempi_opintoala1995_'+f.yliopistotutkinto_sama_aiempi_opintoala1995
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala yo_jatkotutkinto_sama_aiempi_opintoala1995 ON yo_jatkotutkinto_sama_aiempi_opintoala1995.sama_koulutusala_avain = 'yo_jatkotutkinto_sama_aiempi_opintoala1995_'+f.yo_jatkotutkinto_sama_aiempi_opintoala1995
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_opintoala1995 ON opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_opintoala1995.sama_koulutusala_avain = 'opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_opintoala1995_'+f.opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_opintoala1995
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala korkeakoulututkinto_sama_aiempi_opintoala1995 ON korkeakoulututkinto_sama_aiempi_opintoala1995.sama_koulutusala_avain = 'korkeakoulututkinto_sama_aiempi_opintoala1995_'+f.korkeakoulututkinto_sama_aiempi_opintoala1995
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala toisen_asteen_tutkinto_sama_aiempi_opintoala1995 ON toisen_asteen_tutkinto_sama_aiempi_opintoala1995.sama_koulutusala_avain = 'toisen_asteen_tutkinto_sama_aiempi_opintoala1995_'+f.toisen_asteen_tutkinto_sama_aiempi_opintoala1995
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala lu_sama_aiempi_iscfibroad2013 ON lu_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = 'lu_sama_aiempi_iscfibroad2013_'+f.lu_sama_aiempi_iscfibroad2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala amm_sama_aiempi_iscfibroad2013 ON amm_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = 'amm_sama_aiempi_iscfibroad2013_'+f.amm_sama_aiempi_iscfibroad2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala opist_sama_aiempi_iscfibroad2013 ON opist_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = 'opist_sama_aiempi_iscfibroad2013_'+f.opist_sama_aiempi_iscfibroad2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ammka_sama_aiempi_iscfibroad2013 ON ammka_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = 'ammka_sama_aiempi_iscfibroad2013_'+f.ammka_sama_aiempi_iscfibroad2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala akk_sama_aiempi_iscfibroad2013 ON akk_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = 'akk_sama_aiempi_iscfibroad2013_'+f.akk_sama_aiempi_iscfibroad2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala amk_sama_aiempi_iscfibroad2013 ON amk_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = 'amk_sama_aiempi_iscfibroad2013_'+f.amk_sama_aiempi_iscfibroad2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ylamk_sama_aiempi_iscfibroad2013 ON ylamk_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = 'ylamk_sama_aiempi_iscfibroad2013_'+f.ylamk_sama_aiempi_iscfibroad2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ykk_sama_aiempi_iscfibroad2013 ON ykk_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = 'ykk_sama_aiempi_iscfibroad2013_'+f.ykk_sama_aiempi_iscfibroad2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala laaker_sama_aiempi_iscfibroad2013 ON laaker_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = 'laaker_sama_aiempi_iscfibroad2013_'+f.laaker_sama_aiempi_iscfibroad2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala lis_sama_aiempi_iscfibroad2013 ON lis_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = 'lis_sama_aiempi_iscfibroad2013_'+f.lis_sama_aiempi_iscfibroad2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala toht_sama_aiempi_iscfibroad2013 ON toht_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = 'toht_sama_aiempi_iscfibroad2013_'+f.toht_sama_aiempi_iscfibroad2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala yliopistotutkinto_sama_aiempi_iscfibroad2013 ON yliopistotutkinto_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = 'yliopistotutkinto_sama_aiempi_iscfibroad2013_'+f.yliopistotutkinto_sama_aiempi_iscfibroad2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala yo_jatkotutkinto_sama_aiempi_iscfibroad2013 ON yo_jatkotutkinto_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = 'yo_jatkotutkinto_sama_aiempi_iscfibroad2013_'+f.yo_jatkotutkinto_sama_aiempi_iscfibroad2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_iscfibroad2013 ON opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = 'opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_iscfibroad2013_'+f.opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_iscfibroad2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala korkeakoulututkinto_sama_aiempi_iscfibroad2013 ON korkeakoulututkinto_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = 'korkeakoulututkinto_sama_aiempi_iscfibroad2013_'+f.korkeakoulututkinto_sama_aiempi_iscfibroad2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala toisen_asteen_tutkinto_sama_aiempi_iscfibroad2013 ON toisen_asteen_tutkinto_sama_aiempi_iscfibroad2013.sama_koulutusala_avain = 'toisen_asteen_tutkinto_sama_aiempi_iscfibroad2013_'+f.toisen_asteen_tutkinto_sama_aiempi_iscfibroad2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala lu_sama_aiempi_iscfinarrow2013 ON lu_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = 'lu_sama_aiempi_iscfinarrow2013_'+f.lu_sama_aiempi_iscfinarrow2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala amm_sama_aiempi_iscfinarrow2013 ON amm_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = 'amm_sama_aiempi_iscfinarrow2013_'+f.amm_sama_aiempi_iscfinarrow2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala opist_sama_aiempi_iscfinarrow2013 ON opist_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = 'opist_sama_aiempi_iscfinarrow2013_'+f.opist_sama_aiempi_iscfinarrow2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ammka_sama_aiempi_iscfinarrow2013 ON ammka_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = 'ammka_sama_aiempi_iscfinarrow2013_'+f.ammka_sama_aiempi_iscfinarrow2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala akk_sama_aiempi_iscfinarrow2013 ON akk_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = 'akk_sama_aiempi_iscfinarrow2013_'+f.akk_sama_aiempi_iscfinarrow2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala amk_sama_aiempi_iscfinarrow2013 ON amk_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = 'amk_sama_aiempi_iscfinarrow2013_'+f.amk_sama_aiempi_iscfinarrow2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ylamk_sama_aiempi_iscfinarrow2013 ON ylamk_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = 'ylamk_sama_aiempi_iscfinarrow2013_'+f.ylamk_sama_aiempi_iscfinarrow2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala ykk_sama_aiempi_iscfinarrow2013 ON ykk_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = 'ykk_sama_aiempi_iscfinarrow2013_'+f.ykk_sama_aiempi_iscfinarrow2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala laaker_sama_aiempi_iscfinarrow2013 ON laaker_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = 'laaker_sama_aiempi_iscfinarrow2013_'+f.laaker_sama_aiempi_iscfinarrow2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala lis_sama_aiempi_iscfinarrow2013 ON lis_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = 'lis_sama_aiempi_iscfinarrow2013_'+f.lis_sama_aiempi_iscfinarrow2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala toht_sama_aiempi_iscfinarrow2013 ON toht_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = 'toht_sama_aiempi_iscfinarrow2013_'+f.toht_sama_aiempi_iscfinarrow2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala yliopistotutkinto_sama_aiempi_iscfinarrow2013 ON yliopistotutkinto_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = 'yliopistotutkinto_sama_aiempi_iscfinarrow2013_'+f.yliopistotutkinto_sama_aiempi_iscfinarrow2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala yo_jatkotutkinto_sama_aiempi_iscfinarrow2013 ON yo_jatkotutkinto_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = 'yo_jatkotutkinto_sama_aiempi_iscfinarrow2013_'+f.yo_jatkotutkinto_sama_aiempi_iscfinarrow2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_iscfinarrow2013 ON opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = 'opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_iscfinarrow2013_'+f.opisto_tai_ammatillisen_korkea_asteen_tutkinto_sama_aiempi_iscfinarrow2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala korkeakoulututkinto_sama_aiempi_iscfinarrow2013 ON korkeakoulututkinto_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = 'korkeakoulututkinto_sama_aiempi_iscfinarrow2013_'+f.korkeakoulututkinto_sama_aiempi_iscfinarrow2013
LEFT JOIN VipunenTK.dbo.d_sama_koulutusala toisen_asteen_tutkinto_sama_aiempi_iscfinarrow2013 ON toisen_asteen_tutkinto_sama_aiempi_iscfinarrow2013.sama_koulutusala_avain = 'toisen_asteen_tutkinto_sama_aiempi_iscfinarrow2013_'+f.toisen_asteen_tutkinto_sama_aiempi_iscfinarrow2013
*/
--Truncate table VipunenTK.dbo.f_tab_tutkinnon_suorittaneiden_aiempi_koulutus_4_7_yhteiset
-- Rebuild the flattened reporting table from the backing view.
-- Fix: a bare DROP TABLE fails when the table does not exist yet, which
-- breaks the first run of this script on a fresh database; guard with OBJECT_ID.
IF OBJECT_ID('VipunenTK.dbo.f_tab_tutkinnon_suorittaneiden_aiempi_koulutus_4_7_yhteiset', 'U') IS NOT NULL
    DROP TABLE VipunenTK.dbo.f_tab_tutkinnon_suorittaneiden_aiempi_koulutus_4_7_yhteiset;
-- SELECT TOP 0 ... INTO copies only the view's column definitions (no rows).
SELECT TOP 0 * INTO VipunenTK.dbo.f_tab_tutkinnon_suorittaneiden_aiempi_koulutus_4_7_yhteiset
FROM VipunenTK.[dbo].[v_f_tab_tutkinnon_suorittaneiden_aikaisempi_koulutus_yhteiset];
-- Load in two batches split on rivinumero (presumably to limit transaction
-- log growth -- TODO confirm intent); <= 39999 and >= 40000 together cover
-- every row exactly once.
INSERT INTO VipunenTK.dbo.f_tab_tutkinnon_suorittaneiden_aiempi_koulutus_4_7_yhteiset
SELECT * FROM VipunenTK.[dbo].[v_f_tab_tutkinnon_suorittaneiden_aikaisempi_koulutus_yhteiset]
WHERE rivinumero <= 39999;
INSERT INTO VipunenTK.dbo.f_tab_tutkinnon_suorittaneiden_aiempi_koulutus_4_7_yhteiset
SELECT * FROM VipunenTK.[dbo].[v_f_tab_tutkinnon_suorittaneiden_aikaisempi_koulutus_yhteiset]
WHERE rivinumero >= 40000;
|
-- phpMyAdmin SQL Dump
-- version 4.8.0
-- https://www.phpmyadmin.net/
--
-- Host: 127.0.0.1
-- Generation Time: Apr 03, 2019 at 05:58 PM
-- Server version: 10.1.31-MariaDB
-- PHP Version: 7.2.4
-- phpMyAdmin session setup: make the import deterministic regardless of the
-- target server's defaults.
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO"; -- keep explicit 0 values in AUTO_INCREMENT columns
SET AUTOCOMMIT = 0;
START TRANSACTION; -- restore runs inside one transaction, committed later in the dump
SET time_zone = "+00:00";
-- /*!40101 ... */ are MySQL conditional comments: executed only by MySQL >= 4.1.1.
-- They save the client's character-set settings so the end of the dump can restore them.
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `Splice`
--
-- --------------------------------------------------------
-- ----------------------------
-- Table structure for community
-- ----------------------------
DROP TABLE IF EXISTS `community`;
CREATE TABLE `community` (
  `id_community` int(11) NOT NULL AUTO_INCREMENT,
  `song` varchar(50) NOT NULL DEFAULT 'music.mp3',
  PRIMARY KEY (`id_community`)
) ENGINE=InnoDB AUTO_INCREMENT=21 DEFAULT CHARSET=latin1;
-- ----------------------------
-- Records of community
-- ----------------------------
-- Fix: explicit column list so the row still restores correctly if columns
-- are added or reordered later (bare VALUES breaks silently on schema change).
INSERT INTO `community` (`id_community`, `song`) VALUES (1, 'music.mp3');
-- ----------------------------
-- Table structure for dashboard
-- ----------------------------
DROP TABLE IF EXISTS `dashboard`;
CREATE TABLE `dashboard` (
  `id_dashboard` int(11) NOT NULL AUTO_INCREMENT,
  `title` varchar(50) NOT NULL,
  `deskripsi` longtext NOT NULL,
  `gambar` varchar(100) NOT NULL DEFAULT 'gambar.jpg',
  PRIMARY KEY (`id_dashboard`)
) ENGINE=InnoDB AUTO_INCREMENT=21 DEFAULT CHARSET=latin1;
-- ----------------------------
-- Records of dashboard
-- ----------------------------
-- Fix: explicit column list instead of bare VALUES (robust against schema drift).
INSERT INTO `dashboard` (`id_dashboard`, `title`, `deskripsi`, `gambar`) VALUES (1, 'Membuat music yang indah','Hal pertama yang kita dengar tentang musik adalah..','gambar.png');
--
-- Table structure for table `login`
--
CREATE TABLE `login` (
  `No` int(11) NOT NULL,
  `Username` varchar(100) NOT NULL,
  `Password` varchar(100) NOT NULL -- SECURITY: stored in plain text; the application should store a hash (e.g. bcrypt) instead
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `login`
--
-- Fix: explicit column list instead of bare VALUES.
INSERT INTO `login` (`No`, `Username`, `Password`) VALUES (1, 'admin', 'admin');
-- --------------------------------------------------------
--
-- Table structure for table `signup`
--
CREATE TABLE `signup` (
  `Name` varchar(100) NOT NULL,
  `Username` varchar(100) NOT NULL,
  `Password` varchar(100) NOT NULL, -- SECURITY: stored in plain text; hash in the application layer
  `Email` varchar(100) NOT NULL,
  `avatar` varchar(100) NOT NULL DEFAULT 'noavatar.png',
  `Bio` varchar(150) NOT NULL DEFAULT 'Music my life',
  `RegisTime` varchar(25) NOT NULL -- registration time kept as text; format is 'dd-mm-yyyy, hh:mm:ss', not a valid DATETIME literal
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `signup`
--
-- Fix: explicit column list instead of bare VALUES.
INSERT INTO `signup` (`Name`, `Username`, `Password`, `Email`, `avatar`, `Bio`, `RegisTime`) VALUES ('admin', 'admin', 'admin', '<EMAIL>', 'noavatar.png', 'Music my life','03-04-2019, 22:50:23');
-- ----------------------------
-- Table structure for project
-- ----------------------------
CREATE TABLE `project` (
  `NoPro` int(11) NOT NULL,
  `Username` varchar(100) NOT NULL,
  `project_name` varchar(50) NOT NULL DEFAULT 'empty',
  `deskripsi` varchar(100) NOT NULL DEFAULT 'empty'
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- ----------------------------
-- Records of project
-- ----------------------------
-- Fix: explicit column list instead of bare VALUES.
INSERT INTO `project` (`NoPro`, `Username`, `project_name`, `deskripsi`) VALUES (1,'admin','empty','empty');
--
-- Indexes for dumped tables
--
--
-- Indexes for table `project`
--
ALTER TABLE `project`
ADD PRIMARY KEY (`NoPro`);
--
-- Indexes for table `login`
--
ALTER TABLE `login`
ADD PRIMARY KEY (`No`);
--
-- Indexes for table `signup`
--
ALTER TABLE `signup`
ADD PRIMARY KEY (`Username`);
--
-- AUTO_INCREMENT for dumped tables
--
-- NOTE: statement order matters -- MySQL requires an AUTO_INCREMENT column
-- to be indexed, so the MODIFY statements below must run after the
-- ADD PRIMARY KEY statements above.
--
-- AUTO_INCREMENT for table `login`
--
ALTER TABLE `login`
MODIFY `No` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=8;
COMMIT;
--
-- AUTO_INCREMENT for table `project`
--
ALTER TABLE `project`
MODIFY `NoPro` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=8;
COMMIT;
-- Restore the client character-set settings saved at the top of the dump
-- (MySQL conditional comments: executed only by MySQL >= 4.1.1).
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
|
-- Docset index schema (Dash/Zeal style): `payload` holds raw named blobs,
-- `searchIndex` maps a documentation entry's name/type to its page path.
-- Fix: both statements were missing terminating semicolons, so running this
-- file as a script parsed them as one malformed statement.
CREATE TABLE payload (name text, value blob);
CREATE TABLE searchIndex(id INTEGER PRIMARY KEY, name TEXT, type TEXT, path TEXT);
|
-- Make gocardless_mandates.gocardless_creditor_id nullable so a mandate row
-- can exist before its creditor is assigned (PostgreSQL DROP NOT NULL syntax).
ALTER TABLE gocardless_mandates ALTER COLUMN gocardless_creditor_id DROP NOT NULL;
-- phpMyAdmin SQL Dump
-- version 3.3.9
-- http://www.phpmyadmin.net
--
-- Host: localhost
-- Generation Time: Apr 07, 2019 at 10:12 AM
-- Server version: 5.5.8
-- PHP Version: 5.3.5
-- phpMyAdmin session setup for the `rental` dump: keep explicit zeros in
-- AUTO_INCREMENT columns and save the client's charset settings so the end
-- of the dump can restore them (conditional comments, MySQL >= 4.1.1 only).
SET SQL_MODE="NO_AUTO_VALUE_ON_ZERO";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8 */;
--
-- Database: `rental`
--
-- --------------------------------------------------------
--
-- Table structure for table `tbl_booking`
--
-- One row per vehicle rental booking placed by a user.
CREATE TABLE IF NOT EXISTS `tbl_booking` (
`BookingId` int(11) NOT NULL AUTO_INCREMENT,
`UserId` int(11) NOT NULL,
`SelectedDates` varchar(1000) NOT NULL, -- NOTE(review): space-separated date list in one column; a child table would be the normalized design
`Days` int(20) NOT NULL,
`VehicleId` int(11) NOT NULL,
`TotalPayment` int(11) NOT NULL, -- whole-currency amount; presumably INR -- TODO confirm units
`BookingDate` datetime NOT NULL,
`AccidentalAmt` int(20) NOT NULL,
`TheftAmt` int(20) NOT NULL,
`IsVerify` varchar(10) NOT NULL,
`Status` varchar(20) NOT NULL,
`IsPay` varchar(10) NOT NULL,
`WalletAmount` varchar(11) DEFAULT NULL,
`TranscationNo` varchar(100) NOT NULL, -- sic: "Transcation" typo kept; the application references this column name
PRIMARY KEY (`BookingId`),
KEY `Userid` (`UserId`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=22 ;
--
-- Dumping data for table `tbl_booking`
--
INSERT INTO `tbl_booking` (`BookingId`, `UserId`, `SelectedDates`, `Days`, `VehicleId`, `TotalPayment`, `BookingDate`, `AccidentalAmt`, `TheftAmt`, `IsVerify`, `Status`, `IsPay`, `WalletAmount`, `TranscationNo`) VALUES
(21, 57, '2019-04-19 2019-04-20', 2, 31, 9950, '2019-04-04 00:00:00', 500, 500, 'Verified', 'Approved', 'Yes', NULL, '17e1b737300b804416c5');
-- --------------------------------------------------------
--
-- Table structure for table `tbl_city`
--
-- Cities selectable in the app; each belongs to a state via StateId.
-- NOTE(review): CityName has no UNIQUE constraint -- the dump below contains
-- duplicates ('Surat', 'Pune'), so duplicates appear to be tolerated.
CREATE TABLE IF NOT EXISTS `tbl_city` (
`CityId` int(20) NOT NULL AUTO_INCREMENT,
`CityName` varchar(20) NOT NULL,
`CityImg` varchar(20) NOT NULL,
`StateId` int(20) NOT NULL,
PRIMARY KEY (`CityId`),
KEY `Stateid` (`StateId`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=25 ;
--
-- Dumping data for table `tbl_city`
--
INSERT INTO `tbl_city` (`CityId`, `CityName`, `CityImg`, `StateId`) VALUES
(7, 'Surat', 'City_31499_7_.jpg', 50),
(8, 'Ahemdabad', 'City_50273_8_.jpg', 50),
(9, 'Vadodra', 'City_95296_9_.jpg', 50),
(10, 'Vapi', 'City_95776_10_.png', 50),
(11, 'GandhiNagar', 'City_46723_11_.jpg', 50),
(12, 'Rajkot', 'City_92971_12_.png', 50),
(13, 'ShriNagar', 'City_88210_13_.jpg', 48),
(14, 'LolKatta', 'City_97875_14_.jpg', 51),
(15, 'Siliguri', 'City_67247_15_.png', 51),
(16, 'Habra', 'City_66208_16_.jpg', 51),
(17, 'Medinipur', 'City_96017_17_.jpg', 51),
(18, 'Pune', 'City_93872_18_.jpg', 52),
(19, 'Pune', 'City_36376_19_.jpg', 52),
(20, 'Dhule', 'City_18020_20_.jpg', 52),
(21, 'JalGav', 'City_93532_21_.png', 52),
(22, 'Surat', '', 48),
(23, 'vapi', 'City_72393_23_.jpg', 50),
(24, 'ABC', 'City_27037_24_.jpg', 53);
-- --------------------------------------------------------
--
-- Table structure for table `tbl_company`
--
-- Vehicle manufacturers; TypeId links to tbl_type (Car/Bike/...).
CREATE TABLE IF NOT EXISTS `tbl_company` (
`CompanyId` int(20) NOT NULL AUTO_INCREMENT,
`CompanyName` varchar(20) NOT NULL,
`CompanyLogo` varchar(30) NOT NULL,
`TypeId` int(20) NOT NULL,
PRIMARY KEY (`CompanyId`),
KEY `TypeId` (`TypeId`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=54 ;
--
-- Dumping data for table `tbl_company`
--
INSERT INTO `tbl_company` (`CompanyId`, `CompanyName`, `CompanyLogo`, `TypeId`) VALUES
(46, 'B_M_W', 'Company_40828_46_.jpg', 1),
(48, 'Audi', 'Company_56773_48_.jpg', 1),
(50, 'Mini_Cooper', 'Company_19037_50_.jpg', 1),
(51, 'Hyundai', 'Company_72469_51_.jpg', 1),
(52, 'Ford', 'Company_17339_52_.jpg', 1),
(53, 'Royal_Enfield', 'Company_33523_53_.jpg', 2);
-- --------------------------------------------------------
--
-- Table structure for table `tbl_faq`
--
-- FAQ entries grouped into categories via FaqCatId (see tbl_faqcat).
CREATE TABLE IF NOT EXISTS `tbl_faq` (
`FaqId` int(20) NOT NULL AUTO_INCREMENT,
`FaqCatId` int(20) NOT NULL,
`Question` varchar(200) NOT NULL,
`Answer` varchar(500) NOT NULL,
PRIMARY KEY (`FaqId`),
KEY `Faqcatid` (`FaqCatId`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=4 ;
--
-- Dumping data for table `tbl_faq`
--
INSERT INTO `tbl_faq` (`FaqId`, `FaqCatId`, `Question`, `Answer`) VALUES
(2, 20, 'What Your Website Provide An Insurence Service ', 'Yes we provide a Insurence service '),
(3, 19, 'When I receive my order', 'On Given Time Interval');
-- --------------------------------------------------------
--
-- Table structure for table `tbl_faqcat`
--
-- Lookup table of FAQ categories referenced by tbl_faq.FaqCatId.
CREATE TABLE IF NOT EXISTS `tbl_faqcat` (
`FaqCatId` int(20) NOT NULL AUTO_INCREMENT,
`FaqCatName` varchar(20) NOT NULL,
PRIMARY KEY (`FaqCatId`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=22 ;
--
-- Dumping data for table `tbl_faqcat`
--
INSERT INTO `tbl_faqcat` (`FaqCatId`, `FaqCatName`) VALUES
(18, 'Payment'),
(19, 'Order'),
(20, 'Insurence'),
(21, 'Delivery');
-- --------------------------------------------------------
--
-- Table structure for table `tbl_feedback`
--
-- Free-text user feedback; empty in this dump (AUTO_INCREMENT=1).
-- NOTE(review): `Date` is stored as varchar rather than a DATE/DATETIME type,
-- and the name shadows a reserved-ish word -- kept for app compatibility.
CREATE TABLE IF NOT EXISTS `tbl_feedback` (
`FeedBackId` int(20) NOT NULL AUTO_INCREMENT,
`FeedBackText` varchar(50) NOT NULL,
`Date` varchar(20) NOT NULL,
PRIMARY KEY (`FeedBackId`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=1 ;
--
-- Dumping data for table `tbl_feedback`
--
-- --------------------------------------------------------
--
-- Table structure for table `tbl_log`
--
-- Admin login credentials.
-- SECURITY NOTE(review): Password is stored in plain text; the '<PASSWORD>'
-- value below looks like a redaction placeholder from the dump source.
CREATE TABLE IF NOT EXISTS `tbl_log` (
`UserId` int(20) NOT NULL AUTO_INCREMENT,
`Name` varchar(20) DEFAULT NULL,
`Contact` varchar(20) DEFAULT NULL,
`UserName` varchar(20) NOT NULL,
`Password` varchar(20) NOT NULL,
PRIMARY KEY (`UserId`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=2 ;
--
-- Dumping data for table `tbl_log`
--
INSERT INTO `tbl_log` (`UserId`, `Name`, `Contact`, `UserName`, `Password`) VALUES
(1, 'Rahul', '7990932674', 'admin', '<PASSWORD>');
-- --------------------------------------------------------
--
-- Table structure for table `tbl_news`
--
-- News/offer items shown in the app; IsDisplay gates visibility.
-- NOTE(review): NewsDate is stored as varchar, not DATE -- kept for app compatibility.
CREATE TABLE IF NOT EXISTS `tbl_news` (
`NewsId` int(20) NOT NULL AUTO_INCREMENT,
`NewsTitle` varchar(20) NOT NULL,
`NewsDiscripation` varchar(1000) NOT NULL,
`NewsImg` varchar(20) NOT NULL,
`IsDisplay` varchar(20) NOT NULL,
`NewsDate` varchar(20) NOT NULL,
PRIMARY KEY (`NewsId`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=7 ;
--
-- Dumping data for table `tbl_news`
--
INSERT INTO `tbl_news` (`NewsId`, `NewsTitle`, `NewsDiscripation`, `NewsImg`, `IsDisplay`, `NewsDate`) VALUES
(3, 'Times', 'ABC', 'News_16758_3_.jpg', 'Yes', '2019-02-25'),
(4, '<NAME>', 'Get 50% Off On Your First Booking', 'News_76380_4_.jpg', 'Yes', '2019-03-15'),
(5, '<NAME>', 'Get 10% Off On Your Second Booking', 'News_63156_5_.jpg', 'Yes', '2019-03-15'),
(6, 'Royal Enfield', 'Enfield diversified into motor cycles, 1901 and motor cars, 1902. The motor department was put into a separate subsidiary, Enfield Autocar Company Limited incorporated in 1906 and established in new works at Hunt End, Redditch.[10] However Enfield Autocar after just 19 months reported a substantial loss and, aside from Eadie himself, shareholders were unwilling to provide more capital so in early 1907 Eadie sold his control of Eadie Manufacturing to BSA. <NAME> and <NAME> had been appointed directors of BSA before the proposed sale had been put to shareholders. The new combined BSA and Eadie business manufactured "military and sporting rifles, (pedal) cycle and cycle components, motor-cars etc."[11] "BSA and Eadie cycle specialities".[12] But there were still minority Eadie shareholders alongside BSA in 1957.\r\n\r\nThe business of Enfield Autocar, that is to say the plant and stock, was sold to Birmingham''s Alldays & Onions Pneumatic Engineering.[13] Enfield Cycle Compa', 'News_12426_6_.jpg', 'Yes', '2019-03-31');
-- --------------------------------------------------------
--
-- Table structure for table `tbl_rating`
--
-- Per-booking user ratings/reviews; IsDisplay gates public visibility.
CREATE TABLE IF NOT EXISTS `tbl_rating` (
`Rating_Id` int(11) NOT NULL AUTO_INCREMENT,
`BookingId` int(11) NOT NULL,
`User_Id` int(11) NOT NULL,
`Rating` int(2) DEFAULT NULL,
`ReviewText` varchar(500) DEFAULT NULL,
`ReviewDateTime` datetime NOT NULL,
`IsDisplay` varchar(20) DEFAULT NULL,
PRIMARY KEY (`Rating_Id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=2 ;
--
-- Dumping data for table `tbl_rating`
--
INSERT INTO `tbl_rating` (`Rating_Id`, `BookingId`, `User_Id`, `Rating`, `ReviewText`, `ReviewDateTime`, `IsDisplay`) VALUES
(1, 20, 46, 5, 'Maaal', '2019-04-04 00:00:00', 'Yes');
-- --------------------------------------------------------
--
-- Table structure for table `tbl_state`
--
-- States referenced by tbl_city.StateId.
-- Fix: StateId was declared AUTO_INCREMENT but only carried a non-unique
-- KEY, leaving the table without any PRIMARY KEY even though other tables
-- reference StateId. Promoted to PRIMARY KEY; all dumped ids are unique,
-- so the restore below is unaffected.
CREATE TABLE IF NOT EXISTS `tbl_state` (
`StateId` int(20) NOT NULL AUTO_INCREMENT,
`StateName` varchar(20) NOT NULL,
`StateImg` varchar(20) DEFAULT NULL,
PRIMARY KEY (`StateId`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=54 ;
--
-- Dumping data for table `tbl_state`
--
INSERT INTO `tbl_state` (`StateId`, `StateName`, `StateImg`) VALUES
(48, 'Kashmira', 'State_74989_48_.jpg'),
(50, 'Gujarat', 'State_44311_50_.jpg'),
(51, 'West Bengal', 'State_62345_51_.jpg'),
(52, 'Maharastra', 'State_52745_52_.jpg'),
(53, 'XYZ', 'State_27642_53_.jpg');
-- --------------------------------------------------------
--
-- Table structure for table `tbl_status`
--
-- Audit trail of booking status changes per user/booking.
-- Fix: removed KEY `UserId_2` (`UserId`) -- it exactly duplicated the
-- `Userid` index on the same column, costing write performance for no
-- read benefit.
CREATE TABLE IF NOT EXISTS `tbl_status` (
`StatusId` int(11) NOT NULL AUTO_INCREMENT,
`UserId` int(20) NOT NULL,
`BookingId` int(20) NOT NULL,
`Remark` varchar(20) NOT NULL,
`Status` varchar(20) NOT NULL,
`LogDate` datetime NOT NULL,
PRIMARY KEY (`StatusId`),
KEY `Userid` (`UserId`),
KEY `BookingId` (`BookingId`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=35 ;
--
-- Dumping data for table `tbl_status`
--
INSERT INTO `tbl_status` (`StatusId`, `UserId`, `BookingId`, `Remark`, `Status`, `LogDate`) VALUES
(34, 57, 21, 'Status is Approved', 'New', '2019-04-04 15:08:20');
-- --------------------------------------------------------
--
-- Table structure for table `tbl_type`
--
-- Lookup table of vehicle types referenced by tbl_company and tbl_vehicle.
CREATE TABLE IF NOT EXISTS `tbl_type` (
`TypeId` int(20) NOT NULL AUTO_INCREMENT,
`TypeName` varchar(20) NOT NULL,
PRIMARY KEY (`TypeId`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=5 ;
--
-- Dumping data for table `tbl_type`
--
INSERT INTO `tbl_type` (`TypeId`, `TypeName`) VALUES
(1, 'Car'),
(2, 'Bike'),
(3, 'Cycle'),
(4, 'Auto');
-- --------------------------------------------------------
--
-- Table structure for table `tbl_userdetails`
--
-- Registered users; agency-specific columns are NULL for 'Normal' users.
-- Fix: AgencyPhoneNumber was int(10) -- a signed INT tops out at
-- 2,147,483,647, so any real 10-digit phone number would overflow.
-- Widened to bigint(20) to match PhoneNumber; all dumped values are NULL,
-- so the restore is unaffected.
-- SECURITY NOTE(review): Password is stored in plain text (varchar(10)).
CREATE TABLE IF NOT EXISTS `tbl_userdetails` (
`UserId` int(11) NOT NULL AUTO_INCREMENT,
`FirstName` varchar(20) NOT NULL,
`LastName` varchar(20) NOT NULL,
`PhoneNumber` bigint(20) NOT NULL,
`EmailId` varchar(100) NOT NULL,
`Password` varchar(10) NOT NULL,
`RegistrationDate` date DEFAULT NULL,
`OtpCode` int(11) DEFAULT NULL,
`IsVerify` varchar(10) DEFAULT NULL,
`CityId` int(11) DEFAULT NULL,
`UserType` varchar(100) DEFAULT NULL,
`AgencyName` varchar(100) DEFAULT NULL,
`AgencyAddress` varchar(100) DEFAULT NULL,
`LandMark` varchar(100) DEFAULT NULL,
`PinCode` int(20) DEFAULT NULL,
`GstNo` varchar(20) DEFAULT NULL,
`AgencyPhoneNumber` bigint(20) DEFAULT NULL,
`AgencyEmailId` varchar(100) DEFAULT NULL,
`CertificateImage` varchar(100) DEFAULT NULL,
`PersonalImage` varchar(100) DEFAULT NULL,
`SignatureImage` varchar(100) DEFAULT NULL,
`Latitude` varchar(50) DEFAULT NULL,
`Longitude` varchar(50) DEFAULT NULL,
PRIMARY KEY (`UserId`),
KEY `AgencyName` (`AgencyName`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=58 ;
--
-- Dumping data for table `tbl_userdetails`
--
INSERT INTO `tbl_userdetails` (`UserId`, `FirstName`, `LastName`, `PhoneNumber`, `EmailId`, `Password`, `RegistrationDate`, `OtpCode`, `IsVerify`, `CityId`, `UserType`, `AgencyName`, `AgencyAddress`, `LandMark`, `PinCode`, `GstNo`, `AgencyPhoneNumber`, `AgencyEmailId`, `CertificateImage`, `PersonalImage`, `SignatureImage`, `Latitude`, `Longitude`) VALUES
(55, 'Rajan', 'Maurya', 8980957789, '<EMAIL>', '123', '2019-04-04', 3823, NULL, 7, 'Normal', NULL, '(B-86),Hira Nagar Socity, Parvat Patiya', NULL, 395010, NULL, NULL, NULL, NULL, 'Profile_16134_55_.jpg', NULL, '21.17313909459827', '72.83433888629793'),
(57, 'Seema', 'Pandey', 6353470335, '<EMAIL>', '123', '2019-04-04', 1997, NULL, 7, 'Normal', NULL, 'Pata Nahi', NULL, 395010, NULL, NULL, NULL, NULL, NULL, NULL, '21.10535940853266', '72.85596934331306');
-- --------------------------------------------------------
--
-- Table structure for table `tbl_vehicle`
--
-- Vehicles listed for rent by agency users; Price is per-day -- TODO confirm.
-- Fix: removed KEY `TypeId_2` and KEY `TypeId_3` -- both exactly duplicated
-- the `Typeid` index on TypeId, adding write overhead with no read benefit.
CREATE TABLE IF NOT EXISTS `tbl_vehicle` (
`VehicleId` int(11) NOT NULL AUTO_INCREMENT,
`UserId` int(20) NOT NULL,
`VersionId` int(20) NOT NULL,
`Seats` int(11) DEFAULT NULL,
`Transmission` varchar(20) DEFAULT NULL,
`Doors` int(11) DEFAULT NULL,
`AC` varchar(10) DEFAULT NULL,
`Descripation` varchar(100) NOT NULL,
`VehicleImg` varchar(100) NOT NULL,
`RCNo` int(11) NOT NULL,
`RCImg` varchar(100) NOT NULL,
`PucImg` varchar(100) DEFAULT NULL,
`InsuranceImg` varchar(100) DEFAULT NULL,
`Price` int(11) NOT NULL,
`TypeId` int(11) DEFAULT NULL,
`Status` varchar(20) DEFAULT NULL,
PRIMARY KEY (`VehicleId`),
KEY `Typeid` (`TypeId`),
KEY `Userid` (`UserId`),
KEY `VersionId` (`VersionId`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=32 ;
--
-- Dumping data for table `tbl_vehicle`
--
INSERT INTO `tbl_vehicle` (`VehicleId`, `UserId`, `VersionId`, `Seats`, `Transmission`, `Doors`, `AC`, `Descripation`, `VehicleImg`, `RCNo`, `RCImg`, `PucImg`, `InsuranceImg`, `Price`, `TypeId`, `Status`) VALUES
(31, 55, 45, 4, 'Gear/Auto', 5, 'Ac', 'Powering the Mini 3-door Cooper S and the Convertible Cooper S is a 2.0 litre, 4 cylinder, twin powe', 'Vehicle_97406_31_.jpg', 987654321, 'Vehicle_67483_31_.jpg', 'Vehicle_93321_31_.JPG', 'Vehicle_91631_31_.jpg', 5000, 1, 'Approved');
-- --------------------------------------------------------
--
-- Table structure for table `tbl_vehicleimage`
--
-- Additional gallery images per vehicle; empty in this dump.
CREATE TABLE IF NOT EXISTS `tbl_vehicleimage` (
`ImageId` int(11) NOT NULL AUTO_INCREMENT,
`VehicleId` int(11) NOT NULL,
`ImageUrl` varchar(40) NOT NULL,
PRIMARY KEY (`ImageId`),
KEY `VehicleId` (`VehicleId`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=3 ;
--
-- Dumping data for table `tbl_vehicleimage`
--
-- --------------------------------------------------------
--
-- Table structure for table `tbl_version`
--
-- Vehicle models ("versions") per manufacturer (CompanyId -> tbl_company).
-- NOTE(review): many VersionName values below contain '<NAME>' placeholders,
-- apparently redacted by the dump source.
CREATE TABLE IF NOT EXISTS `tbl_version` (
`VersionId` int(20) NOT NULL AUTO_INCREMENT,
`CompanyId` int(20) NOT NULL,
`VersionName` varchar(20) NOT NULL,
PRIMARY KEY (`VersionId`),
KEY `Companyid` (`CompanyId`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=59 ;
--
-- Dumping data for table `tbl_version`
--
INSERT INTO `tbl_version` (`VersionId`, `CompanyId`, `VersionName`) VALUES
(11, 52, 'EcoSport'),
(12, 52, 'Figo'),
(13, 52, 'Endeavour'),
(14, 52, 'Freestyle'),
(15, 52, 'Mustang'),
(16, 52, 'Aspire'),
(17, 46, 'X1'),
(18, 46, 'B M W 3 Series'),
(19, 46, 'B M W X5'),
(20, 46, 'B M W 5 Series'),
(21, 46, 'B M W X3'),
(22, 46, 'B M W 7 Series'),
(23, 46, 'B M W X4'),
(24, 46, 'B M W X6'),
(25, 46, 'B M W 6 Series '),
(26, 46, 'B M W M Series'),
(27, 46, 'B M W M2'),
(28, 46, 'B M W 3 Series GT'),
(29, 48, '<NAME>'),
(30, 48, '<NAME>'),
(31, 48, '<NAME>'),
(32, 48, '<NAME>'),
(33, 48, '<NAME>'),
(34, 48, '<NAME>'),
(35, 48, '<NAME>'),
(36, 48, '<NAME>'),
(37, 48, '<NAME>'),
(38, 48, '<NAME>'),
(39, 48, '<NAME>let'),
(40, 48, '<NAME>'),
(41, 48, '<NAME>'),
(42, 50, 'Mini Cooper Converti'),
(43, 50, 'Mini Cooper Countrym'),
(44, 50, '<NAME>'),
(45, 50, 'Mini Cooper 5 DOOR'),
(46, 50, 'Mini Cooper 3 DOOR'),
(47, 51, 'Creta'),
(48, 51, 'Hyundai Creta'),
(49, 51, 'Hyundai Elite i20'),
(50, 51, 'Hyundai Grand i10'),
(51, 51, '<NAME>'),
(52, 51, '<NAME>'),
(53, 51, '<NAME>'),
(54, 51, '<NAME>'),
(55, 51, '<NAME>'),
(56, 51, '<NAME>'),
(57, 51, 'Hyundai i20 Active'),
(58, 53, 'Clasic 350');
-- --------------------------------------------------------
--
-- Table structure for table `tbl_wallet`
--
-- Credit/debit transaction log per user.
-- NOTE(review): `UserId` has no index although every lookup is presumably
-- per-user -- consider adding KEY (`UserId`); confirm against query patterns.
CREATE TABLE IF NOT EXISTS `tbl_wallet` (
`WalletId` int(11) NOT NULL AUTO_INCREMENT,
`UserId` int(11) NOT NULL,
`Type` varchar(10) NOT NULL,
`Amount` int(11) NOT NULL,
`DateTime` datetime NOT NULL,
`Remark` varchar(100) NOT NULL,
PRIMARY KEY (`WalletId`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=36 ;
--
-- Dumping data for table `tbl_wallet`
-- ('Vehicle Submited' misspelling is frozen in the data; left untouched.)
--
INSERT INTO `tbl_wallet` (`WalletId`, `UserId`, `Type`, `Amount`, `DateTime`, `Remark`) VALUES
(5, 45, 'Credit', 50, '2019-03-29 09:21:04', 'New User'),
(8, 45, 'Credit', 3500, '2019-03-31 03:34:31', 'Vehicle Submited'),
(9, 45, 'Credit', 50000, '2019-03-31 12:14:37', 'Vehicle Submited'),
(10, 45, 'Credit', 50, '2019-03-31 12:15:08', 'Vehicle Submited'),
(11, 46, 'Credit', 50, '2019-04-01 04:22:07', 'Vehicle Submited'),
(12, 47, 'Credit', 50, '2019-04-01 04:23:37', 'New User'),
(13, 48, 'Credit', 50, '2019-04-02 06:15:48', 'New User'),
(19, 49, 'Credit', 50, '2019-04-03 09:44:42', 'New User'),
(20, 50, 'Credit', 50, '2019-04-03 09:45:29', 'New User'),
(21, 51, 'Credit', 50, '2019-04-03 15:44:20', 'New User'),
(22, 52, 'Credit', 50, '2019-04-03 15:46:57', 'New User'),
(23, 53, 'Credit', 50, '2019-04-03 15:50:06', 'New User'),
(24, 45, 'Credit', 16000, '2019-04-03 16:09:44', 'Vehicle Submited'),
(25, 47, 'Credit', 50, '2019-04-04 05:18:14', 'New User'),
(26, 48, 'Credit', 50, '2019-04-04 05:20:19', 'New User'),
(27, 49, 'Credit', 50, '2019-04-04 05:22:28', 'New User'),
(28, 50, 'Credit', 50, '2019-04-04 05:38:53', 'New User'),
(29, 51, 'Credit', 50, '2019-04-04 05:40:15', 'New User'),
(30, 52, 'Credit', 50, '2019-04-04 09:26:38', 'New User'),
(31, 53, 'Credit', 50, '2019-04-04 09:27:27', 'New User'),
(32, 54, 'Credit', 50, '2019-04-04 09:29:11', 'New User'),
(33, 55, 'Credit', 50, '2019-04-04 13:53:54', 'New User'),
(34, 56, 'Credit', 50, '2019-04-04 15:01:09', 'New User'),
(35, 57, 'Credit', 50, '2019-04-04 15:02:55', 'New User');
--
-- Constraints for dumped tables
-- (added after the data so the inserts above never hit FK checks mid-load;
-- all FKs cascade on both delete and update)
--
--
-- Constraints for table `tbl_booking`
--
ALTER TABLE `tbl_booking`
ADD CONSTRAINT `tbl_booking_ibfk_1` FOREIGN KEY (`UserId`) REFERENCES `tbl_userdetails` (`UserId`) ON DELETE CASCADE ON UPDATE CASCADE;
--
-- Constraints for table `tbl_company`
--
ALTER TABLE `tbl_company`
ADD CONSTRAINT `tbl_company_ibfk_1` FOREIGN KEY (`TypeId`) REFERENCES `tbl_type` (`TypeId`) ON DELETE CASCADE ON UPDATE CASCADE;
--
-- Constraints for table `tbl_faq`
--
ALTER TABLE `tbl_faq`
ADD CONSTRAINT `tbl_faq_ibfk_1` FOREIGN KEY (`FaqCatId`) REFERENCES `tbl_faqcat` (`FaqCatId`) ON DELETE CASCADE ON UPDATE CASCADE;
--
-- Constraints for table `tbl_status`
--
ALTER TABLE `tbl_status`
ADD CONSTRAINT `tbl_status_ibfk_1` FOREIGN KEY (`UserId`) REFERENCES `tbl_userdetails` (`UserId`) ON DELETE CASCADE ON UPDATE CASCADE,
ADD CONSTRAINT `tbl_status_ibfk_2` FOREIGN KEY (`BookingId`) REFERENCES `tbl_booking` (`BookingId`) ON DELETE CASCADE ON UPDATE CASCADE;
--
-- Constraints for table `tbl_vehicle`
-- (note: `tbl_vehicle_ibfk_3` is absent -- presumably dropped at some point)
--
ALTER TABLE `tbl_vehicle`
ADD CONSTRAINT `tbl_vehicle_ibfk_1` FOREIGN KEY (`TypeId`) REFERENCES `tbl_type` (`TypeId`) ON DELETE CASCADE ON UPDATE CASCADE,
ADD CONSTRAINT `tbl_vehicle_ibfk_2` FOREIGN KEY (`UserId`) REFERENCES `tbl_userdetails` (`UserId`) ON DELETE CASCADE ON UPDATE CASCADE,
ADD CONSTRAINT `tbl_vehicle_ibfk_4` FOREIGN KEY (`VersionId`) REFERENCES `tbl_version` (`VersionId`) ON DELETE CASCADE ON UPDATE CASCADE;
--
-- Constraints for table `tbl_vehicleimage`
--
ALTER TABLE `tbl_vehicleimage`
ADD CONSTRAINT `tbl_vehicleimage_ibfk_1` FOREIGN KEY (`VehicleId`) REFERENCES `tbl_vehicle` (`VehicleId`) ON DELETE CASCADE ON UPDATE CASCADE;
--
-- Constraints for table `tbl_version`
--
ALTER TABLE `tbl_version`
ADD CONSTRAINT `tbl_version_ibfk_1` FOREIGN KEY (`CompanyId`) REFERENCES `tbl_company` (`CompanyId`) ON DELETE CASCADE ON UPDATE CASCADE;
|
CREATE VIEW property.ExtendedProperty_Repo2Sys_level2_RepoObjectColumn
AS
-- Projects repository column properties onto the (level0, level1, level2)
-- addressing scheme used by sys.extended_properties / sp_addextendedproperty.
SELECT
    prop.property_name,
    CAST(prop.property_value AS NVARCHAR(4000)) AS property_value,
    N'Schema' AS level0type,
    ro_parent.RepoObject_schema_name AS level0name,
    lev_parent.level1type,
    ro_parent.RepoObject_name AS level1name,
    N'COLUMN' AS level2type,
    roc.RepoObjectColumn_name AS level2name,
    prop.RepoObjectColumn_guid,
    roc.Repo_user_type_fullname,
    ro_parent.RepoObject_guid AS parent_RepoObject_guid,
    ro_parent.RepoObject_type AS parent_RepoObject_type
FROM property.RepoObjectColumnProperty AS prop
INNER JOIN repo.RepoObjectColumn AS roc
    ON roc.RepoObjectColumn_guid = prop.RepoObjectColumn_guid
INNER JOIN repo.RepoObject AS ro_parent
    ON ro_parent.RepoObject_guid = roc.RepoObject_guid
INNER JOIN configT.type_level1type_level2type AS lev_parent
    ON lev_parent.type = ro_parent.RepoObject_type
WHERE roc.is_RepoObjectColumn_name_uniqueidentifier = 0
    -- SchemaCompare has issues comparing extended properties for graph table
    -- columns, we need to exclude them
    AND roc.Repo_graph_type IS NULL
GO
-- Extended properties recorded for the view and its columns.
-- Normalized to the one-statement-per-line EXECUTE style already used for the
-- later entries in this batch; stray duplicated empty GO batches removed.
EXECUTE sp_addextendedproperty @name = N'RepoObject_guid', @value = '57b33a4a-426d-eb11-84e2-a81e8446d5b0', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn';
GO
EXECUTE sp_addextendedproperty @name = N'RepoObjectColumn_guid', @value = '94b33a4a-426d-eb11-84e2-a81e8446d5b0', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn', @level2type = N'COLUMN', @level2name = N'RepoObjectColumn_guid';
GO
EXECUTE sp_addextendedproperty @name = N'RepoObjectColumn_guid', @value = '95b33a4a-426d-eb11-84e2-a81e8446d5b0', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn', @level2type = N'COLUMN', @level2name = N'Repo_user_type_fullname';
GO
EXECUTE sp_addextendedproperty @name = N'RepoObjectColumn_guid', @value = '8db33a4a-426d-eb11-84e2-a81e8446d5b0', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn', @level2type = N'COLUMN', @level2name = N'property_value';
GO
EXECUTE sp_addextendedproperty @name = N'RepoObjectColumn_guid', @value = '8cb33a4a-426d-eb11-84e2-a81e8446d5b0', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn', @level2type = N'COLUMN', @level2name = N'property_name';
GO
EXECUTE sp_addextendedproperty @name = N'RepoObjectColumn_guid', @value = '97b33a4a-426d-eb11-84e2-a81e8446d5b0', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn', @level2type = N'COLUMN', @level2name = N'parent_RepoObject_type';
GO
EXECUTE sp_addextendedproperty @name = N'RepoObjectColumn_guid', @value = '96b33a4a-426d-eb11-84e2-a81e8446d5b0', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn', @level2type = N'COLUMN', @level2name = N'parent_RepoObject_guid';
GO
EXECUTE sp_addextendedproperty @name = N'RepoObjectColumn_guid', @value = '92b33a4a-426d-eb11-84e2-a81e8446d5b0', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn', @level2type = N'COLUMN', @level2name = N'level2type';
GO
EXECUTE sp_addextendedproperty @name = N'RepoObjectColumn_guid', @value = '93b33a4a-426d-eb11-84e2-a81e8446d5b0', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn', @level2type = N'COLUMN', @level2name = N'level2name';
GO
EXECUTE sp_addextendedproperty @name = N'RepoObjectColumn_guid', @value = '90b33a4a-426d-eb11-84e2-a81e8446d5b0', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn', @level2type = N'COLUMN', @level2name = N'level1type';
GO
EXECUTE sp_addextendedproperty @name = N'RepoObjectColumn_guid', @value = '91b33a4a-426d-eb11-84e2-a81e8446d5b0', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn', @level2type = N'COLUMN', @level2name = N'level1name';
GO
EXECUTE sp_addextendedproperty @name = N'RepoObjectColumn_guid', @value = '8eb33a4a-426d-eb11-84e2-a81e8446d5b0', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn', @level2type = N'COLUMN', @level2name = N'level0type';
GO
EXECUTE sp_addextendedproperty @name = N'RepoObjectColumn_guid', @value = '8fb33a4a-426d-eb11-84e2-a81e8446d5b0', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn', @level2type = N'COLUMN', @level2name = N'level0name';
GO
EXECUTE sp_addextendedproperty @name = N'ReferencedObjectList', @value = N'* [configT].[type_level1type_level2type]
* [property].[RepoObjectColumnProperty]
* [repo].[RepoObject]
* [repo].[RepoObjectColumn]', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn';
GO
EXECUTE sp_addextendedproperty @name = N'pk_IndexPatternColumnName', @value = N'level0type,level0name,level1type,level1name,level2type,level2name,property_name', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn';
GO
EXECUTE sp_addextendedproperty @name = N'pk_IndexPatternColumnDatatype', @value = N'nvarchar(6),nvarchar(128),varchar(9),nvarchar(128),nvarchar(6),nvarchar(128),nvarchar(128)', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn';
GO
EXECUTE sp_addextendedproperty @name = N'pk_index_guid', @value = N'D7B7CBB0-0596-EB11-84F4-A81E8446D5B0', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn';
GO
EXECUTE sp_addextendedproperty @name = N'AntoraReferencingList', @value = N'* xref:property.ExtendedProperty_Repo2Sys_level2_Union.adoc[]', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn';
GO
EXECUTE sp_addextendedproperty @name = N'AntoraReferencedList', @value = N'* xref:configT.type_level1type_level2type.adoc[]
* xref:property.RepoObjectColumnProperty.adoc[]
* xref:repo.RepoObject.adoc[]
* xref:repo.RepoObjectColumn.adoc[]', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn';
GO
EXECUTE sp_addextendedproperty @name = N'ReferencedObjectColumnList', @value = N'* [property].[RepoObjectColumnProperty].[RepoObjectColumn_guid]', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn', @level2type = N'COLUMN', @level2name = N'RepoObjectColumn_guid';
GO
EXECUTE sp_addextendedproperty @name = N'ReferencedObjectColumnList', @value = N'* [repo].[RepoObjectColumn].[Repo_user_type_fullname]', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn', @level2type = N'COLUMN', @level2name = N'Repo_user_type_fullname';
GO
EXECUTE sp_addextendedproperty @name = N'ReferencedObjectColumnList', @value = N'* [property].[RepoObjectColumnProperty].[property_value]', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn', @level2type = N'COLUMN', @level2name = N'property_value';
GO
EXECUTE sp_addextendedproperty @name = N'ReferencedObjectColumnList', @value = N'* [property].[RepoObjectColumnProperty].[property_name]', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn', @level2type = N'COLUMN', @level2name = N'property_name';
GO
EXECUTE sp_addextendedproperty @name = N'ReferencedObjectColumnList', @value = N'* [repo].[RepoObject].[RepoObject_type]', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn', @level2type = N'COLUMN', @level2name = N'parent_RepoObject_type';
GO
EXECUTE sp_addextendedproperty @name = N'ReferencedObjectColumnList', @value = N'* [repo].[RepoObject].[RepoObject_guid]', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn', @level2type = N'COLUMN', @level2name = N'parent_RepoObject_guid';
GO
EXECUTE sp_addextendedproperty @name = N'ReferencedObjectColumnList', @value = N'* [repo].[RepoObjectColumn].[RepoObjectColumn_name]', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn', @level2type = N'COLUMN', @level2name = N'level2name';
GO
EXECUTE sp_addextendedproperty @name = N'ReferencedObjectColumnList', @value = N'* [configT].[type_level1type_level2type].[level1type]', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn', @level2type = N'COLUMN', @level2name = N'level1type';
GO
EXECUTE sp_addextendedproperty @name = N'ReferencedObjectColumnList', @value = N'* [repo].[RepoObject].[RepoObject_name]', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn', @level2type = N'COLUMN', @level2name = N'level1name';
GO
EXECUTE sp_addextendedproperty @name = N'ReferencedObjectColumnList', @value = N'* [repo].[RepoObject].[RepoObject_schema_name]', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn', @level2type = N'COLUMN', @level2name = N'level0name';
GO
EXECUTE sp_addextendedproperty @name = N'is_ssas', @value = N'0', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn';
GO
EXECUTE sp_addextendedproperty @name = N'is_repo_managed', @value = N'0', @level0type = N'SCHEMA', @level0name = N'property', @level1type = N'VIEW', @level1name = N'ExtendedProperty_Repo2Sys_level2_RepoObjectColumn';
|
<gh_stars>1-10
-- Extended Events session capturing "long" activity for SQLWATCH monitoring.
-- NOTE: all [duration] predicate values are in microseconds, so 15000000 = 15 s
-- (the commented-out wait_info events used milliseconds: 1000 = 1 s).
CREATE EVENT SESSION [SQLWATCH_long_queries] ON SERVER
/* any query that waited over 1 second for a resource */
--ADD EVENT sqlos.wait_info(
--	ACTION(sqlserver.client_app_name,sqlserver.client_hostname,sqlserver.database_name,sqlserver.plan_handle,sqlserver.session_id,sqlserver.session_nt_username,sqlserver.sql_text,sqlserver.tsql_stack,sqlserver.username)
--	WHERE ([package0].[greater_than_uint64]([duration],(1000)) AND [package0].[equal_uint64]([opcode],(1)) AND [sqlserver].[sql_text]<>N'')),
--ADD EVENT sqlos.wait_info_external(
--	ACTION(sqlserver.client_app_name,sqlserver.client_hostname,sqlserver.database_name,sqlserver.plan_handle,sqlserver.session_id,sqlserver.session_nt_username,sqlserver.sql_text,sqlserver.tsql_stack,sqlserver.username)
--	WHERE ([package0].[greater_than_uint64]([duration],(1000)) AND [package0].[equal_uint64]([opcode],(1)) AND [sqlserver].[sql_text]<>N'')),
/* any query that ran for longer than 15 seconds
   Remove when targeting SQL2008 */
-- stored procedure / function completions over 15 s, user sessions only
ADD EVENT sqlserver.module_end(SET collect_statement=(1)
    ACTION(sqlserver.client_app_name,sqlserver.client_hostname,sqlserver.database_name,sqlserver.plan_handle,sqlserver.session_id,sqlserver.session_nt_username,sqlserver.sql_text,sqlserver.tsql_stack,sqlserver.username)
    WHERE ([package0].[greater_than_uint64]([duration],(15000000)) AND [sqlserver].[is_system]=(0))),
-- RPC calls over 15 s that either errored ([result]=(2)) or did measurable work
ADD EVENT sqlserver.rpc_completed(SET collect_statement=(1)
    ACTION(sqlserver.client_app_name,sqlserver.client_hostname,sqlserver.database_name,sqlserver.plan_handle,sqlserver.session_id,sqlserver.session_nt_username,sqlserver.sql_text,sqlserver.tsql_stack,sqlserver.username)
    WHERE ([package0].[greater_than_uint64]([duration],(15000000)) AND ([result]=(2) OR [package0].[greater_than_uint64]([logical_reads],(0)) OR [package0].[greater_than_uint64]([cpu_time],(0)) OR [package0].[greater_than_uint64]([physical_reads],(0)) OR [package0].[greater_than_uint64]([writes],(0))) AND [package0].[equal_boolean]([sqlserver].[is_system],(0)))),
-- individual statements inside modules over 15 s with measurable work
ADD EVENT sqlserver.sp_statement_completed(
    ACTION(sqlserver.client_app_name,sqlserver.client_hostname,sqlserver.database_name,sqlserver.plan_handle,sqlserver.session_id,sqlserver.session_nt_username,sqlserver.sql_text,sqlserver.tsql_stack,sqlserver.username)
    WHERE ([package0].[greater_than_int64]([duration],(15000000)) AND ([package0].[greater_than_uint64]([cpu_time],(0)) OR [package0].[greater_than_uint64]([logical_reads],(0)) OR [package0].[greater_than_uint64]([physical_reads],(0)) OR [package0].[greater_than_uint64]([writes],(0))) AND [sqlserver].[is_system]=(0))),
-- ad-hoc batch statements over 15 s with measurable work
ADD EVENT sqlserver.sql_statement_completed(
    ACTION(sqlserver.client_app_name,sqlserver.client_hostname,sqlserver.database_name,sqlserver.plan_handle,sqlserver.session_id,sqlserver.session_nt_username,sqlserver.sql_text,sqlserver.tsql_stack,sqlserver.username)
    WHERE ([package0].[greater_than_int64]([duration],(15000000)) AND ([package0].[greater_than_uint64]([cpu_time],(0)) OR [package0].[greater_than_uint64]([logical_reads],(0)) OR [package0].[greater_than_uint64]([physical_reads],(0)) OR [package0].[greater_than_uint64]([writes],(0))) AND [sqlserver].[is_system]=(0)))
--ADD TARGET package0.ring_buffer(SET max_events_limit=(250))
-- 5 MB rolling event file target; STARTUP_STATE=ON restarts the session with the instance
ADD TARGET package0.event_file(SET filename='SQLWATCH_long_queries.xel', max_file_size=(5), max_rollover_files=(0))
WITH (MAX_MEMORY=4096 KB,EVENT_RETENTION_MODE=ALLOW_SINGLE_EVENT_LOSS,MAX_DISPATCH_LATENCY=5 SECONDS,MAX_EVENT_SIZE=0 KB,MEMORY_PARTITION_MODE=NONE,TRACK_CAUSALITY=ON,STARTUP_STATE=ON)
GO |
<filename>sql-file.sql
-- Minimal user database with one seed account.
-- NOTE(review): the database name `user` is easy to confuse with built-in
-- objects; kept because callers reference it.
CREATE DATABASE IF NOT EXISTS user;
USE user;
CREATE TABLE IF NOT EXISTS `users` (
user_id int(11) NOT NULL AUTO_INCREMENT,
user_email varchar(50) NOT NULL,
-- Holds a 32-char MD5 hex digest. NOTE(review): MD5 is not a safe password
-- hash; migrate to bcrypt/argon2 when the application layer allows.
user_pass varchar(32) NOT NULL,
user_fname varchar(50) NOT NULL,
CONSTRAINT PK_user_id PRIMARY KEY(user_id),
CONSTRAINT UK_user_email UNIQUE(user_email)
);
-- Explicit column list so the insert survives future schema changes
-- (the original bare `INSERT INTO users VALUES(...)` breaks silently if a
-- column is ever added).
INSERT INTO users (user_id, user_email, user_pass, user_fname)
VALUES(1, '<EMAIL>', MD5('123456'), '<NAME>');
<filename>Patches/Live_1/Raw_SQL/BALANCE_nuimqol_rmCrit.sql<gh_stars>1-10
USE [perpetuumsa]
GO
------------Nuimqol: remove crit--------
-- Removes the critical-hit-chance chassis bonus from each listed robot chassis.
-- A DELETE with a NULL @chassisbonusID simply matches no rows, so missing
-- definitions are silently skipped (original behavior, preserved).
PRINT N'Nuimqol: remove crit';
DECLARE @definitionID int;
DECLARE @extensionID int;
DECLARE @aggfieldID int;
DECLARE @chassisbonusID int;
-- The target aggregate field is identical for every chassis below, so it is
-- resolved once here instead of being re-queried before each DELETE
-- (the original script repeated this exact lookup eight times).
SET @aggfieldID = (SELECT TOP 1 id from aggregatefields WHERE[name] = 'critical_hit_chance_modifier' ORDER BY [name] DESC);
-- Heavy chassis (mesmer, kain) are bonused via the heavy-armored piloting extension.
PRINT N'def_mesmer_chassis';
SET @extensionID = (SELECT TOP 1 extensionid from dbo.extensions WHERE extensionname = 'ext_heavyarmored_unit_piloting');
SET @definitionID = (SELECT TOP 1 definition from entitydefaults WHERE [definitionname] = 'def_mesmer_chassis' ORDER BY definition DESC);
SET @chassisbonusID = (SELECT TOP 1 id from chassisbonus WHERE[definition] = @definitionID AND [extension] = @extensionID AND [targetpropertyID] = @aggfieldID ORDER BY [definition], [extension], [targetpropertyID] DESC);
DELETE [dbo].[chassisbonus] WHERE id=@chassisbonusID;
PRINT N'def_mesmer_chassis_pr';
SET @definitionID = (SELECT TOP 1 definition from entitydefaults WHERE [definitionname] = 'def_mesmer_chassis_pr' ORDER BY definition DESC);
SET @chassisbonusID = (SELECT TOP 1 id from chassisbonus WHERE[definition] = @definitionID AND [extension] = @extensionID AND [targetpropertyID] = @aggfieldID ORDER BY [definition], [extension], [targetpropertyID] DESC);
DELETE [dbo].[chassisbonus] WHERE id=@chassisbonusID;
PRINT N'def_kain_chassis';
SET @definitionID = (SELECT TOP 1 definition from entitydefaults WHERE [definitionname] = 'def_kain_chassis' ORDER BY definition DESC);
SET @chassisbonusID = (SELECT TOP 1 id from chassisbonus WHERE[definition] = @definitionID AND [extension] = @extensionID AND [targetpropertyID] = @aggfieldID ORDER BY [definition], [extension], [targetpropertyID] DESC);
DELETE [dbo].[chassisbonus] WHERE id=@chassisbonusID;
PRINT N'def_kain_chassis_pr';
SET @definitionID = (SELECT TOP 1 definition from entitydefaults WHERE [definitionname] = 'def_kain_chassis_pr' ORDER BY definition DESC);
SET @chassisbonusID = (SELECT TOP 1 id from chassisbonus WHERE[definition] = @definitionID AND [extension] = @extensionID AND [targetpropertyID] = @aggfieldID ORDER BY [definition], [extension], [targetpropertyID] DESC);
DELETE [dbo].[chassisbonus] WHERE id=@chassisbonusID;
-- Light chassis (arbalest, yagel) switch to the light-armored piloting extension.
PRINT N'def_arbalest_chassis_';
SET @extensionID = (SELECT TOP 1 extensionid from dbo.extensions WHERE extensionname = 'ext_lightarmored_unit_piloting');
SET @definitionID = (SELECT TOP 1 definition from entitydefaults WHERE [definitionname] = 'def_arbalest_chassis' ORDER BY definition DESC);
SET @chassisbonusID = (SELECT TOP 1 id from chassisbonus WHERE[definition] = @definitionID AND [extension] = @extensionID AND [targetpropertyID] = @aggfieldID ORDER BY [definition], [extension], [targetpropertyID] DESC);
DELETE [dbo].[chassisbonus] WHERE id=@chassisbonusID;
PRINT N'def_arbalest_chassis_pr';
SET @definitionID = (SELECT TOP 1 definition from entitydefaults WHERE [definitionname] = 'def_arbalest_chassis_pr' ORDER BY definition DESC);
SET @chassisbonusID = (SELECT TOP 1 id from chassisbonus WHERE[definition] = @definitionID AND [extension] = @extensionID AND [targetpropertyID] = @aggfieldID ORDER BY [definition], [extension], [targetpropertyID] DESC);
DELETE [dbo].[chassisbonus] WHERE id=@chassisbonusID;
PRINT N'def_yagel_chassis';
SET @definitionID = (SELECT TOP 1 definition from entitydefaults WHERE [definitionname] = 'def_yagel_chassis' ORDER BY definition DESC);
SET @chassisbonusID = (SELECT TOP 1 id from chassisbonus WHERE[definition] = @definitionID AND [extension] = @extensionID AND [targetpropertyID] = @aggfieldID ORDER BY [definition], [extension], [targetpropertyID] DESC);
DELETE [dbo].[chassisbonus] WHERE id=@chassisbonusID;
PRINT N'def_yagel_chassis_pr';
SET @definitionID = (SELECT TOP 1 definition from entitydefaults WHERE [definitionname] = 'def_yagel_chassis_pr' ORDER BY definition DESC);
SET @chassisbonusID = (SELECT TOP 1 id from chassisbonus WHERE[definition] = @definitionID AND [extension] = @extensionID AND [targetpropertyID] = @aggfieldID ORDER BY [definition], [extension], [targetpropertyID] DESC);
DELETE [dbo].[chassisbonus] WHERE id=@chassisbonusID;
GO
|
-- Dump preamble: load everything in one transaction with a fixed timezone
-- and without resetting AUTO_INCREMENT on explicit zero values.
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";
-- Chart of accounts: one row per party; `id` is an application-generated
-- string key (a unique key on it is added at the end of this dump).
DROP TABLE IF EXISTS `accounts`;
CREATE TABLE `accounts` (
`id` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`ac_id` varchar(100) COLLATE utf8_unicode_ci NOT NULL,
`name` varchar(100) COLLATE utf8_unicode_ci NOT NULL DEFAULT '',
`address` varchar(255) COLLATE utf8_unicode_ci DEFAULT '',
`city` varchar(100) COLLATE utf8_unicode_ci DEFAULT 'user',
`phone_1` varchar(50) COLLATE utf8_unicode_ci DEFAULT NULL,
`phone_2` varchar(50) COLLATE utf8_unicode_ci DEFAULT NULL,
`profile` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL,
`email` varchar(199) COLLATE utf8_unicode_ci DEFAULT NULL,
`website` varchar(199) COLLATE utf8_unicode_ci DEFAULT NULL,
`type` varchar(50) COLLATE utf8_unicode_ci DEFAULT NULL,
`is_changed` tinyint(4) DEFAULT '1',
`created_by` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL COMMENT 'User id',
`created_at` timestamp NULL DEFAULT NULL,
`deleted_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci ROW_FORMAT=COMPACT;
-- Cartage (freight) ledger entries; `account` and `ledger` hold string ids of
-- related rows -- no FK constraints are declared anywhere in this schema.
DROP TABLE IF EXISTS `cartage`;
CREATE TABLE `cartage` (
`id` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`account` varchar(255) COLLATE utf8_unicode_ci NOT NULL COMMENT 'Account Id',
`ledger` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL,
`debit` double DEFAULT '0',
`credit` double DEFAULT '0',
`description` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL,
`is_changed` tinyint(4) DEFAULT '1',
`created_by` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL COMMENT 'User Id',
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL,
`deleted_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci ROW_FORMAT=COMPACT;
-- Per-vehicle document expiry dates, keyed by registration number.
DROP TABLE IF EXISTS `documents`;
CREATE TABLE `documents` (
`id` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
-- The original dump carried COMMENT 'Account Id' here, a copy-paste from
-- `cartage`; reg_no is the vehicle registration number (cf. `purchases`.`reg_no`
-- and `sales`.`reg_no`) -- confirm against application usage.
`reg_no` varchar(100) COLLATE utf8_unicode_ci NOT NULL COMMENT 'Vehicle registration number',
-- NOTE(review): '0000-00-00' zero-date defaults are rejected under strict SQL
-- modes (NO_ZERO_DATE); consider DEFAULT NULL once the application tolerates it.
`passing` date DEFAULT '0000-00-00',
`dip_chart` date DEFAULT '0000-00-00',
`route` date DEFAULT NULL,
`token` date DEFAULT NULL,
`insurance` date DEFAULT NULL,
`explosive` date DEFAULT NULL,
`tracker` date DEFAULT NULL,
`is_changed` tinyint(4) DEFAULT '1',
`created_by` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL COMMENT 'User Id',
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL,
`deleted_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci ROW_FORMAT=COMPACT;
-- General-journal entries: free-form debit/credit postings against an account.
DROP TABLE IF EXISTS `general`;
CREATE TABLE `general` (
`id` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`account` varchar(255) COLLATE utf8_unicode_ci NOT NULL COMMENT 'Account Id',
`debit` double DEFAULT '0',
`credit` double DEFAULT '0',
`description` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL,
`is_changed` tinyint(4) DEFAULT '1',
`created_by` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL COMMENT 'User Id',
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL,
`deleted_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci ROW_FORMAT=COMPACT;
-- Double-entry ledger: each row moves `total` from the debit account to the
-- credit account.
DROP TABLE IF EXISTS `ledger`;
CREATE TABLE `ledger` (
`id` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`credit` varchar(255) COLLATE utf8_unicode_ci NOT NULL COMMENT 'Credit Account Id',
`total` double NOT NULL DEFAULT '0',
-- The original dump's comment here said 'Credit Account Id' -- an obvious
-- copy-paste error from the `credit` column above; corrected.
`debit` varchar(255) COLLATE utf8_unicode_ci NOT NULL COMMENT 'Debit Account Id',
`description` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL,
`is_changed` tinyint(4) DEFAULT '1',
`created_by` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL COMMENT 'User Id',
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL,
`deleted_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci ROW_FORMAT=COMPACT;
-- Profit postings per account (same shape as `general`).
DROP TABLE IF EXISTS `profits`;
CREATE TABLE `profits` (
`id` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`account` varchar(255) COLLATE utf8_unicode_ci NOT NULL COMMENT 'Account Id',
`debit` double DEFAULT '0',
`credit` double DEFAULT '0',
`description` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL,
`is_changed` tinyint(4) DEFAULT '1',
`created_by` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL COMMENT 'User Id',
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL,
`deleted_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci ROW_FORMAT=COMPACT;
-- Purchase line items; `reg_no` ties a purchase to a vehicle.
-- NOTE(review): `total` appears to be quantity * price (denormalized) -- confirm.
DROP TABLE IF EXISTS `purchases`;
CREATE TABLE `purchases` (
`id` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`product` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`quantity` bigint(20) NOT NULL DEFAULT '1',
`price` double DEFAULT '0',
`total` double DEFAULT '0',
`supplier` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL,
`terminal` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL,
`reg_no` varchar(100) COLLATE utf8_unicode_ci DEFAULT NULL,
`is_changed` tinyint(4) DEFAULT '1',
`created_by` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL COMMENT 'User Id',
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL,
`deleted_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci ROW_FORMAT=COMPACT;
-- Sales line items (mirror of `purchases` with customer instead of supplier).
DROP TABLE IF EXISTS `sales`;
CREATE TABLE `sales` (
`id` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`product` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`quantity` bigint(20) NOT NULL DEFAULT '1',
`price` double DEFAULT '0',
`total` double DEFAULT '0',
`customer` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL,
`description` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL,
`reg_no` varchar(100) COLLATE utf8_unicode_ci DEFAULT NULL,
`is_changed` tinyint(4) DEFAULT '1',
`created_by` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL COMMENT 'User Id',
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL,
`deleted_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci ROW_FORMAT=COMPACT;
-- Simple name/value application settings store.
DROP TABLE IF EXISTS `settings`;
CREATE TABLE `settings` (
`id` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`name` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`value` varchar(255) COLLATE utf8_unicode_ci NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci ROW_FORMAT=COMPACT;
-- Seed row recording the last successful server synchronisation.
INSERT INTO `settings` (`id`, `name`, `value`) VALUES
('SETTING554d6fc3f2671c53cce57d59dc168f65fe3d8df50a0e9fc65c1a3eae3d19f23d2019020617324315494563637715133', 'last_server_sync', '2019-01-14 05:54:14');
-- Application users.
-- NOTE(review): `password` holds an MD5 hex digest (e10adc... below is the
-- well-known MD5 of '123456'), and `role` likewise looks like an MD5 value --
-- weak hashing; confirm before hardening. Placeholder tokens (<NAME>,
-- <PASSWORD>) are dataset sanitization, not real values.
DROP TABLE IF EXISTS `users`;
CREATE TABLE `users` (
`id` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`name` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`user_name` varchar(255) COLLATE utf8_unicode_ci DEFAULT '',
`password` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`profile` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL,
`role` varchar(255) COLLATE utf8_unicode_ci DEFAULT 'user',
`permissions` text COLLATE utf8_unicode_ci,
`is_changed` tinyint(4) DEFAULT '1',
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL,
`deleted_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci ROW_FORMAT=COMPACT;
INSERT INTO `users` (`id`, `name`, `user_name`, `password`, `profile`, `role`, `permissions`, `is_changed`, `created_at`, `updated_at`, `deleted_at`) VALUES('USER<PASSWORD>2431549<PASSWORD>3637715111', '<NAME>', '<PASSWORD>an', 'e10adc3949ba59abbe56e057f20f883e', NULL, '21232f297a57a5a743894a0e4a801fc3', '{}', 0, '2018-12-29 01:55:52', '2019-02-25 19:05:00', NULL);
-- Keys for the tables above.
-- NOTE(review): every `*_id_unique` UNIQUE KEY duplicates the PRIMARY KEY on
-- the same column and costs extra write overhead; presumably generated by the
-- application's migration tooling -- confirm before dropping.
ALTER TABLE `accounts`
ADD PRIMARY KEY (`id`),
ADD UNIQUE KEY `accounts_id_unique` (`id`) USING BTREE;
ALTER TABLE `cartage`
ADD PRIMARY KEY (`id`),
ADD UNIQUE KEY `cartage_id_unique` (`id`) USING BTREE;
ALTER TABLE `documents`
ADD PRIMARY KEY (`id`),
ADD UNIQUE KEY `documents_id_unique` (`id`) USING BTREE;
ALTER TABLE `general`
ADD PRIMARY KEY (`id`),
ADD UNIQUE KEY `general_id_unique` (`id`) USING BTREE;
ALTER TABLE `ledger`
ADD PRIMARY KEY (`id`),
ADD UNIQUE KEY `ledger_id_unique` (`id`) USING BTREE;
ALTER TABLE `profits`
ADD PRIMARY KEY (`id`),
ADD UNIQUE KEY `profit_id_unique` (`id`) USING BTREE;
ALTER TABLE `purchases`
ADD PRIMARY KEY (`id`),
ADD UNIQUE KEY `purchase_id_unique` (`id`) USING BTREE;
ALTER TABLE `sales`
ADD PRIMARY KEY (`id`),
ADD UNIQUE KEY `sale_id_unique` (`id`) USING BTREE;
ALTER TABLE `settings`
ADD PRIMARY KEY (`id`),
ADD UNIQUE KEY `setting_id_unique` (`id`) USING BTREE;
ALTER TABLE `users`
ADD PRIMARY KEY (`id`),
ADD UNIQUE KEY `users_id_unique` (`id`) USING BTREE,
ADD UNIQUE KEY `users_name_unique` (`user_name`) USING BTREE;
COMMIT;
<filename>migrations/20200503110153_uspSaveCalc.up.sql
-- Insert-if-absent persistence for calculation results, keyed by SurebetId.
-- Returns the SurebetId either way: via OUTPUT on insert, via SELECT on hit.
-- NOTE(review): SELECT-then-INSERT is race-prone under concurrency (two
-- sessions can both miss the row and both attempt the insert); consider
-- INSERT ... WHERE NOT EXISTS with UPDLOCK/HOLDLOCK, or MERGE, inside a
-- transaction -- confirm expected call concurrency first.
create or alter proc dbo.uspSaveCalc @Profit decimal(9, 5),
@FirstName varchar(1000),
@SecondName varchar(1000),
@LowerWinIndex tinyint,
@HigherWinIndex tinyint,
@FirstIndex tinyint,
@SecondIndex tinyint,
@WinDiff decimal(9, 5),
@WinDiffRel decimal(9, 5),
@FortedSurebetId bigint,
@SurebetId bigint,
@Roi int
as
begin
set nocount on
declare @Id bigint
select @Id = SurebetId from dbo.Calc where SurebetId = @SurebetId
-- @@rowcount reflects the SELECT above: 0 means no row exists yet for this
-- SurebetId (comments do not reset @@rowcount, so this check is safe).
if @@rowcount = 0
insert into dbo.Calc(Profit, FirstName, SecondName, LowerWinIndex, HigherWinIndex, FirstIndex, SecondIndex,
WinDiff, WinDiffRel, FortedSurebetId, SurebetId, Roi)
output inserted.SurebetId
values (@Profit, @FirstName, @SecondName, @LowerWinIndex, @HigherWinIndex, @FirstIndex, @SecondIndex, @WinDiff,
@WinDiffRel, @FortedSurebetId, @SurebetId, @Roi)
else
select @Id
end
<filename>backend/model/src/main/resources/db/sampleInserts_PS.sql
-- Sample/dev seed data (PostgreSQL syntax).
-- Customer 2000001: payment methods are an array literal cast to the
-- shared.PAYMENTMETHOD enum array type; files/paymentdetails are JSON text.
INSERT INTO propertysearcher.customer(id, paymentmethods, files, location, created, updated, paymentdetails, pricemultiplier)
VALUES (2000001,ARRAY ['DEFAULT'] :: shared.PAYMENTMETHOD [],'[]','DE',NOW(),NOW(),'{}',1.5);
-- "user" must stay double-quoted: it is a reserved word in PostgreSQL.
-- Applicant user 2000002 belongs to customer 2000001; password is null
-- (presumably set later via registration flow — verify against caller).
INSERT INTO propertysearcher."user" (id, email, password, customer_id, enabled, expired, locked, status, lastlogin, created, updated, profile, address, type)
VALUES (2000002, '<EMAIL>', null, 2000001, true,false,false,'REGISTERED', NOW(), NOW(), NOW(), '{}', '{}', 'APPLICANT');
|
<filename>src/test/resources/dbmigration/migrationtest/db2/1.1.sql
-- Migrationscripts for ebean unittest
-- apply changes
-- NOTE(review): generated Ebean migration DDL (DB2 dialect) that doubles as
-- an expected-output test fixture — keep the generated text verbatim.
-- New entity table with a DB2 identity primary key.
create table migtest_e_user (
id integer generated by default as identity not null,
constraint pk_migtest_e_user primary key (id)
);
-- Many-to-many join tables: Ebean emits one per navigation direction,
-- each with a composite primary key over the two FK columns.
create table migtest_mtm_c_migtest_mtm_m (
migtest_mtm_c_id integer not null,
migtest_mtm_m_id bigint not null,
constraint pk_migtest_mtm_c_migtest_mtm_m primary key (migtest_mtm_c_id,migtest_mtm_m_id)
);
create table migtest_mtm_m_migtest_mtm_c (
migtest_mtm_m_id bigint not null,
migtest_mtm_c_id integer not null,
constraint pk_migtest_mtm_m_migtest_mtm_c primary key (migtest_mtm_m_id,migtest_mtm_c_id)
);
alter table migtest_ckey_detail add column one_key integer;
alter table migtest_ckey_detail add column two_key varchar(127);
alter table migtest_ckey_detail add constraint fk_mgtst_ck_e1qkb5 foreign key (one_key,two_key) references migtest_ckey_parent (one_key,two_key) on delete restrict;
alter table migtest_ckey_parent add column assoc_id integer;
alter table migtest_fk_cascade drop constraint fk_mgtst_fk_65kf6l;
alter table migtest_fk_cascade add constraint fk_mgtst_fk_65kf6l foreign key (one_id) references migtest_fk_cascade_one (id) on delete restrict;
alter table migtest_fk_none add constraint fk_mgtst_fk_nn_n_d foreign key (one_id) references migtest_fk_one (id) on delete restrict;
alter table migtest_fk_none_via_join add constraint fk_mgtst_fk_9tknzj foreign key (one_id) references migtest_fk_one (id) on delete restrict;
alter table migtest_fk_set_null drop constraint fk_mgtst_fk_wicx8x;
alter table migtest_fk_set_null add constraint fk_mgtst_fk_wicx8x foreign key (one_id) references migtest_fk_one (id) on delete restrict;
alter table migtest_e_basic drop constraint uq_mgtst__b_4aybzy;
alter table migtest_e_basic drop constraint uq_mgtst__b_4ayc02;
-- Tighten migtest_e_basic.status: backfill NULLs first so the NOT NULL
-- alter cannot fail, then set the default and rebuild the CHECK constraint
-- with the new value set.
update migtest_e_basic set status = 'A' where status is null;
alter table migtest_e_basic drop constraint ck_mgtst__bsc_stts;
alter table migtest_e_basic alter column status set default 'A';
alter table migtest_e_basic alter column status set not null;
alter table migtest_e_basic add constraint ck_mgtst__bsc_stts check ( status in ('N','A','I','?'));
-- Loosen status2: drop its CHECK, widen the type, drop default and NOT NULL.
alter table migtest_e_basic drop constraint ck_mgtst__b_z543fg;
-- NOTE(review): DB2 normally spells a type change as
-- "alter column status2 set data type varchar(127)" — confirm the target
-- DB2 version accepts this shorter generated form.
alter table migtest_e_basic alter column status2 varchar(127);
alter table migtest_e_basic alter column status2 drop default;
-- NOTE(review): DB2's usual nullability syntax is "drop not null", not
-- "set null" — verify against the dialect this fixture targets.
alter table migtest_e_basic alter column status2 set null;
-- rename all collisions;
-- NOT SUPPORTED alter table migtest_e_basic add constraint uq_mgtst__b_vs45xo unique (description);
insert into migtest_e_user (id) select distinct user_id from migtest_e_basic;
alter table migtest_e_basic add constraint fk_mgtst__bsc_sr_d foreign key (user_id) references migtest_e_user (id) on delete restrict;
alter table migtest_e_basic alter column user_id set null;
alter table migtest_e_basic add column new_string_field varchar(255) default 'foo''bar' not null;
alter table migtest_e_basic add column new_boolean_field boolean default true not null;
update migtest_e_basic set new_boolean_field = old_boolean;
alter table migtest_e_basic add column new_boolean_field2 boolean default true not null;
alter table migtest_e_basic add column progress integer default 0 not null;
alter table migtest_e_basic add constraint ck_mgtst__b_l39g41 check ( progress in (0,1,2));
alter table migtest_e_basic add column new_integer integer default 42 not null;
-- NOT SUPPORTED alter table migtest_e_basic add constraint uq_mgtst__b_ucfcne unique (status,indextest1);
-- NOT SUPPORTED alter table migtest_e_basic add constraint uq_mgtst__bsc_nm unique (name);
-- NOT SUPPORTED alter table migtest_e_basic add constraint uq_mgtst__b_4ayc00 unique (indextest4);
-- NOT SUPPORTED alter table migtest_e_basic add constraint uq_mgtst__b_4ayc01 unique (indextest5);
alter table migtest_e_enum drop constraint ck_mgtst__n_773sok;
comment on column migtest_e_history.test_string is 'Column altered to long now';
alter table migtest_e_history alter column test_string bigint;
comment on table migtest_e_history is 'We have history now';
-- NOTE: table has @History - special migration may be necessary
update migtest_e_history2 set test_string = 'unknown' where test_string is null;
alter table migtest_e_history2 alter column test_string set default 'unknown';
alter table migtest_e_history2 alter column test_string set not null;
alter table migtest_e_history2 add column test_string2 varchar(255);
alter table migtest_e_history2 add column test_string3 varchar(255) default 'unknown' not null;
alter table migtest_e_history2 add column new_column varchar(20);
alter table migtest_e_history4 alter column test_number bigint;
alter table migtest_e_history5 add column test_boolean boolean default false not null;
-- NOTE: table has @History - special migration may be necessary
update migtest_e_history6 set test_number1 = 42 where test_number1 is null;
alter table migtest_e_history6 alter column test_number1 set default 42;
alter table migtest_e_history6 alter column test_number1 set not null;
alter table migtest_e_history6 alter column test_number2 set null;
alter table migtest_e_index1 alter column string1 varchar(20);
alter table migtest_e_index1 alter column string2 varchar(20);
alter table migtest_e_index2 alter column string1 varchar(20);
alter table migtest_e_index2 alter column string2 varchar(20);
alter table migtest_e_index3 alter column string1 varchar(20);
alter table migtest_e_index3 alter column string2 varchar(20);
alter table migtest_e_index4 alter column string1 varchar(20);
alter table migtest_e_index4 alter column string2 varchar(20);
alter table migtest_e_index5 alter column string1 varchar(20);
alter table migtest_e_index5 alter column string2 varchar(20);
alter table migtest_e_index6 drop constraint uq_mgtst__n_1aoskk;
alter table migtest_e_index6 alter column string1 varchar(20);
alter table migtest_e_index6 alter column string2 varchar(20);
alter table migtest_e_softdelete add column deleted boolean default false not null;
alter table migtest_oto_child add column master_id bigint;
create index ix_mgtst__b_eu8css on migtest_e_basic (indextest3);
create index ix_mgtst__b_eu8csv on migtest_e_basic (indextest6);
drop index ix_mgtst__b_eu8csq;
drop index ix_mgtst__b_eu8csu;
drop index ix_mgtst__ndx5;
drop index ix_mgtst__n_r52a9e;
create index ix_mgtst_mt_3ug4ok on migtest_mtm_c_migtest_mtm_m (migtest_mtm_c_id);
alter table migtest_mtm_c_migtest_mtm_m add constraint fk_mgtst_mt_93awga foreign key (migtest_mtm_c_id) references migtest_mtm_c (id) on delete restrict;
create index ix_mgtst_mt_3ug4ou on migtest_mtm_c_migtest_mtm_m (migtest_mtm_m_id);
alter table migtest_mtm_c_migtest_mtm_m add constraint fk_mgtst_mt_93awgk foreign key (migtest_mtm_m_id) references migtest_mtm_m (id) on delete restrict;
create index ix_mgtst_mt_b7nbcu on migtest_mtm_m_migtest_mtm_c (migtest_mtm_m_id);
alter table migtest_mtm_m_migtest_mtm_c add constraint fk_mgtst_mt_ggi34k foreign key (migtest_mtm_m_id) references migtest_mtm_m (id) on delete restrict;
create index ix_mgtst_mt_b7nbck on migtest_mtm_m_migtest_mtm_c (migtest_mtm_c_id);
alter table migtest_mtm_m_migtest_mtm_c add constraint fk_mgtst_mt_ggi34a foreign key (migtest_mtm_c_id) references migtest_mtm_c (id) on delete restrict;
create index ix_mgtst_ck_x45o21 on migtest_ckey_parent (assoc_id);
alter table migtest_ckey_parent add constraint fk_mgtst_ck_da00mr foreign key (assoc_id) references migtest_ckey_assoc (id) on delete restrict;
alter table migtest_oto_child add constraint fk_mgtst_t__csyl38 foreign key (master_id) references migtest_oto_master (id) on delete restrict;
|
<gh_stars>10-100
-- MySQL dump 10.13 Distrib 5.5.42, for debian-linux-gnu (x86_64)
--
-- Host: localhost Database: oc_testing
-- ------------------------------------------------------
-- Server version 5.5.42-1
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
-- --------------------------------------------------------
--
-- Table structure for table `oc_activity`
--
-- ownCloud activity-stream log: one row per user-visible event
-- (subject/message hold template keys; *params hold PHP-serialized args —
-- see the sample rows inserted below).
-- NOTE(review): no PRIMARY KEY or index is declared here and `activity_id`
-- lacks the AUTO_INCREMENT attribute even though the table option carries
-- AUTO_INCREMENT=3 — presumably a phpMyAdmin-style dump that adds keys in a
-- later ALTER TABLE section not visible in this chunk; verify.
DROP TABLE IF EXISTS `oc8_activity`;
CREATE TABLE IF NOT EXISTS `oc8_activity` (
`activity_id` int(11) NOT NULL,
`timestamp` int(11) NOT NULL DEFAULT '0',
`priority` int(11) NOT NULL DEFAULT '0',
`type` varchar(255) COLLATE utf8_bin NOT NULL,
`user` varchar(64) COLLATE utf8_bin NOT NULL,
`affecteduser` varchar(64) COLLATE utf8_bin NOT NULL,
`app` varchar(255) COLLATE utf8_bin NOT NULL,
`subject` varchar(255) COLLATE utf8_bin NOT NULL,
`subjectparams` varchar(255) COLLATE utf8_bin NOT NULL,
`message` varchar(255) COLLATE utf8_bin DEFAULT NULL,
`messageparams` varchar(255) COLLATE utf8_bin DEFAULT NULL,
`file` varchar(255) COLLATE utf8_bin DEFAULT NULL,
`link` varchar(255) COLLATE utf8_bin DEFAULT NULL
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
--
-- Daten für Tabelle `oc_activity`
--
INSERT INTO `oc8_activity` (`activity_id`, `timestamp`, `priority`, `type`, `user`, `affecteduser`, `app`, `subject`, `subjectparams`, `message`, `messageparams`, `file`, `link`) VALUES
(1, 1430076028, 40, 'file_created', 'admin', 'admin', 'files', 'created_self', 'a:1:{i:0;s:18:"/phaser-master.zip";}', '', 'a:0:{}', '/phaser-master.zip', 'http://127.0.0.1:49080/owncloud/index.php/apps/files?dir=%2F'),
(2, 1430076163, 40, 'file_created', '<EMAIL>', '<EMAIL>', 'files', 'created_self', 'a:1:{i:0;s:15:"/iMovie 211.ipa";}', '', 'a:0:{}', '/iMovie 211.ipa', 'http://127.0.0.1:49080/owncloud/index.php/apps/files?dir=%2F');
-- --------------------------------------------------------
--
-- Tabellenstruktur für Tabelle `oc_activity_mq`
--
DROP TABLE IF EXISTS `oc8_activity_mq`;
CREATE TABLE IF NOT EXISTS `oc8_activity_mq` (
`mail_id` int(11) NOT NULL,
`amq_timestamp` int(11) NOT NULL DEFAULT '0',
`amq_latest_send` int(11) NOT NULL DEFAULT '0',
`amq_type` varchar(255) COLLATE utf8_bin NOT NULL,
`amq_affecteduser` varchar(64) COLLATE utf8_bin NOT NULL,
`amq_appid` varchar(255) COLLATE utf8_bin NOT NULL,
`amq_subject` varchar(255) COLLATE utf8_bin NOT NULL,
`amq_subjectparams` varchar(255) COLLATE utf8_bin NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
-- --------------------------------------------------------
--
-- Tabellenstruktur für Tabelle `oc_appconfig`
--
DROP TABLE IF EXISTS `oc8_appconfig`;
CREATE TABLE IF NOT EXISTS `oc8_appconfig` (
`appid` varchar(32) COLLATE utf8_bin NOT NULL DEFAULT '',
`configkey` varchar(64) COLLATE utf8_bin NOT NULL DEFAULT '',
`configvalue` longtext COLLATE utf8_bin
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
--
-- Daten für Tabelle `oc_appconfig`
--
INSERT INTO `oc8_appconfig` (`appid`, `configkey`, `configvalue`) VALUES
('activity', 'enabled', 'yes'),
('activity', 'installed_version', '1.2.0'),
('activity', 'ocsid', '166038'),
('activity', 'types', 'filesystem'),
('backgroundjob', 'lastjob', '2'),
('core', 'global_cache_gc_lastrun', '1430076256'),
('core', 'installedat', '1430075864.5067'),
('core', 'lastcron', '1430076367'),
('core', 'lastupdatedat', '1430075864.5136'),
('core', 'public_files', 'files_sharing/public.php'),
('core', 'public_gallery', 'gallery/public.php'),
('core', 'public_webdav', 'files_sharing/publicwebdav.php'),
('core', 'remote_files', 'files/appinfo/remote.php'),
('core', 'remote_webdav', 'files/appinfo/remote.php'),
('files', 'enabled', 'yes'),
('files', 'installed_version', '1.1.9'),
('files', 'types', 'filesystem'),
('files_locking', 'enabled', 'yes'),
('files_locking', 'installed_version', ''),
('files_locking', 'types', 'filesystem'),
('files_pdfviewer', 'enabled', 'yes'),
('files_pdfviewer', 'installed_version', '0.7'),
('files_pdfviewer', 'ocsid', '166049'),
('files_pdfviewer', 'types', ''),
('files_sharing', 'enabled', 'yes'),
('files_sharing', 'installed_version', '0.6.0'),
('files_sharing', 'ocsid', '166050'),
('files_sharing', 'types', 'filesystem'),
('files_texteditor', 'enabled', 'yes'),
('files_texteditor', 'installed_version', '0.4'),
('files_texteditor', 'ocsid', '166051'),
('files_texteditor', 'types', ''),
('files_trashbin', 'enabled', 'yes'),
('files_trashbin', 'installed_version', '0.6.2'),
('files_trashbin', 'ocsid', '166052'),
('files_trashbin', 'types', 'filesystem'),
('files_versions', 'enabled', 'yes'),
('files_versions', 'installed_version', '1.0.5'),
('files_versions', 'ocsid', '166053'),
('files_versions', 'types', 'filesystem'),
('files_videoviewer', 'enabled', 'yes'),
('files_videoviewer', 'installed_version', '0.1.3'),
('files_videoviewer', 'ocsid', '166054'),
('files_videoviewer', 'types', ''),
('firstrunwizard', 'enabled', 'yes'),
('firstrunwizard', 'installed_version', '1.1'),
('firstrunwizard', 'ocsid', '166055'),
('firstrunwizard', 'types', ''),
('gallery', 'enabled', 'yes'),
('gallery', 'installed_version', '0.6.0'),
('gallery', 'ocsid', '166056'),
('gallery', 'types', ''),
('provisioning_api', 'enabled', 'yes'),
('provisioning_api', 'installed_version', '0.2'),
('provisioning_api', 'types', 'filesystem'),
('revealjs', 'enabled', 'yes'),
('revealjs', 'installed_version', '2.6.1'),
('revealjs', 'types', ''),
('roundcube', 'autoLogin', 'on'),
('roundcube', 'enableDebug', 'on'),
('roundcube', 'enabled', 'yes'),
('roundcube', 'installed_version', '2.6.1'),
('roundcube', 'maildir', '/roundcube/'),
('roundcube', 'rcHost', ''),
('roundcube', 'rcPort', ''),
('roundcube', 'removeControlNav', ''),
('roundcube', 'removeHeaderNav', ''),
('roundcube', 'types', ''),
('storagecharts2', 'enabled', 'yes'),
('storagecharts2', 'installed_version', '2.6.1'),
('storagecharts2', 'types', ''),
('templateeditor', 'enabled', 'yes'),
('templateeditor', 'installed_version', '0.1'),
('templateeditor', 'types', ''),
('updater', 'enabled', 'yes'),
('updater', 'installed_version', '0.4'),
('updater', 'ocsid', '166059'),
('updater', 'types', '');
-- --------------------------------------------------------
--
-- Table structure for table `oc_filecache`
--
-- ownCloud file metadata cache: one row per file/folder per storage.
-- `path_hash` is a 32-char hex value per the sample data below — presumably
-- the MD5 of `path`, used for indexed lookups since `path` itself is
-- varchar(4000); `parent` references another fileid in this same table
-- (-1 for storage roots in the sample rows); `mimetype`/`mimepart` hold ids
-- matching the `oc8_mimetypes` rows inserted later in this dump.
DROP TABLE IF EXISTS `oc8_filecache`;
CREATE TABLE IF NOT EXISTS `oc8_filecache` (
`fileid` int(11) NOT NULL,
`storage` int(11) NOT NULL DEFAULT '0',
`path` varchar(4000) COLLATE utf8_bin DEFAULT NULL,
`path_hash` varchar(32) COLLATE utf8_bin NOT NULL DEFAULT '',
`parent` int(11) NOT NULL DEFAULT '0',
`name` varchar(250) COLLATE utf8_bin DEFAULT NULL,
`mimetype` int(11) NOT NULL DEFAULT '0',
`mimepart` int(11) NOT NULL DEFAULT '0',
`size` bigint(20) NOT NULL DEFAULT '0',
`mtime` int(11) NOT NULL DEFAULT '0',
`storage_mtime` int(11) NOT NULL DEFAULT '0',
`encrypted` int(11) NOT NULL DEFAULT '0',
`unencrypted_size` bigint(20) NOT NULL DEFAULT '0',
`etag` varchar(40) COLLATE utf8_bin DEFAULT NULL,
`permissions` int(11) DEFAULT '0'
) ENGINE=InnoDB AUTO_INCREMENT=33 DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
--
-- Daten für Tabelle `oc_filecache`
--
INSERT INTO `oc8_filecache` (`fileid`, `storage`, `path`, `path_hash`, `parent`, `name`, `mimetype`, `mimepart`, `size`, `mtime`, `storage_mtime`, `encrypted`, `unencrypted_size`, `etag`, `permissions`) VALUES
(1, 1, '', 'd41d8cd98f00b204e9800998ecf8427e', -1, '', 2, 1, 2486024, 1430075865, 1430075864, 0, 0, '553d39d93aea5', 23),
(2, 1, 'cache', '0fea6a13c52b4d4725368f24b045ca84', 1, 'cache', 2, 1, 0, 1430075864, 1430075864, 0, 0, '553d39d8992f6', 31),
(3, 1, 'files', '45b963397aa40d4a0063e0d85e4fe7a1', 1, 'files', 2, 1, 36225799, 1430076028, 1430076028, 0, 0, '553d3a7cae41a', 31),
(4, 1, 'files/Documents', '0ad78ba05b6961d92f7970b2b3922eca', 3, 'Documents', 2, 1, 36227, 1430075864, 1430075864, 0, 0, '553d39d903236', 31),
(5, 1, 'files/Documents/Example.odt', 'c89c560541b952a435783a7d51a27d50', 4, 'Example.odt', 4, 3, 36227, 1430075865, 1430075865, 0, 0, '016691faff3be29034fc948eefa4a6eb', 27),
(6, 1, 'files/Photos', 'd01bb67e7b71dd49fd06bad922f521c9', 3, 'Photos', 2, 1, 678556, 1430075865, 1430075865, 0, 0, '553d39d930fca', 31),
(7, 1, 'files/Photos/Squirrel.jpg', 'de85d1da71bcd6232ad893f959063b8c', 6, 'Squirrel.jpg', 6, 5, 233724, 1430075865, 1430075865, 0, 0, '55f5f5cf9cb64c8ce6d9733b62682b00', 27),
(8, 1, 'files/Photos/Paris.jpg', 'a208ddedf08367bbc56963107248dda5', 6, 'Paris.jpg', 6, 5, 228761, 1430075865, 1430075865, 0, 0, 'cfa030df5025766b81f2a48511fed911', 27),
(9, 1, 'files/Photos/San Francisco.jpg', '9fc714efbeaafee22f7058e73d2b1c3b', 6, 'San Francisco.jpg', 6, 5, 216071, 1430075865, 1430075865, 0, 0, 'f1d261f5c319e92830ef6a69af8999a4', 27),
(10, 1, 'files/ownCloudUserManual.pdf', 'c8edba2d1b8eb651c107b43532c34445', 3, 'ownCloudUserManual.pdf', 7, 3, 1771241, 1430075865, 1430075865, 0, 0, '2837d233dcdde0b4e6fdc6e4e4630fe0', 27),
(11, 1, 'files/phaser-master.zip', 'f2824c932d55b55570275eb21ce510c7', 3, 'phaser-master.zip', 8, 3, 33739775, 1430076028, 1430076028, 0, 0, '0b847772effbd4531466b2458dc5f020', 27),
(12, 3, '', 'd41d8cd98f00b204e9800998ecf8427e', -1, '', 2, 1, 2486024, 1430076041, 1430076040, 0, 0, '553d3a8948bd6', 23),
(13, 3, 'cache', '0fea6a13c52b4d4725368f24b045ca84', 12, 'cache', 2, 1, 0, 1430076040, 1430076040, 0, 0, '553d3a88ee78d', 31),
(14, 3, 'files', '45b963397aa40d4a0063e0d85e4fe7a1', 12, 'files', 2, 1, 718363724, 1430076163, 1430076154, 0, 0, '553d3b03b6ccf', 31),
(15, 3, 'files/Documents', '0ad78ba05b6961d92f7970b2b3922eca', 14, 'Documents', 2, 1, 36227, 1430076041, 1430076041, 0, 0, '553d3a89142e3', 31),
(16, 3, 'files/Documents/Example.odt', 'c89c560541b952a435783a7d51a27d50', 15, 'Example.odt', 4, 3, 36227, 1430076041, 1430076041, 0, 0, '8637d66796e784a64e53fb643912a38a', 27),
(17, 3, 'files/Photos', 'd01bb67e7b71dd49fd06bad922f521c9', 14, 'Photos', 2, 1, 678556, 1430076041, 1430076041, 0, 0, '553d3a894066a', 31),
(18, 3, 'files/Photos/Squirrel.jpg', 'de85d1da71bcd6232ad893f959063b8c', 17, 'Squirrel.jpg', 6, 5, 233724, 1430076041, 1430076041, 0, 0, '6eaa6d378f208c185f02a4e0bf4ac9fd', 27),
(19, 3, 'files/Photos/Paris.jpg', 'a208ddedf08367bbc56963107248dda5', 17, 'Paris.jpg', 6, 5, 228761, 1430076041, 1430076041, 0, 0, 'df937e4a431220fe8f5b06d7a4323218', 27),
(20, 3, 'files/Photos/San Francisco.jpg', '9fc714efbeaafee22f7058e73d2b1c3b', 17, 'San Francisco.jpg', 6, 5, 216071, 1430076041, 1430076041, 0, 0, '61cc38dc10472ed529dbba322bf502de', 27),
(21, 3, 'files/ownCloudUserManual.pdf', 'c8edba2d1b8eb651c107b43532c34445', 14, 'ownCloudUserManual.pdf', 7, 3, 1771241, 1430076041, 1430076041, 0, 0, 'a3b4d3c0b6e7947e31c969efc097b3ef', 27),
(22, 3, 'files/iMovie 211.ipa', 'd458dc39b1185fff22c721fa4b2dc9c7', 14, 'iMovie 211.ipa', 9, 3, 715877700, 1430076163, 1430076163, 0, 0, '1b604eb202bbb53f0a3419b21af2f940', 27),
(23, 4, '', 'd41d8cd98f00b204e9800998ecf8427e', -1, '', 2, 1, 2486024, 1430076363, 1430076363, 0, 0, '553d3bcbdd2c6', 23),
(24, 4, 'cache', '0fea6a13c52b4d4725368f24b045ca84', 23, 'cache', 2, 1, 0, 1430076363, 1430076363, 0, 0, '553d3bcb9372b', 31),
(25, 4, 'files', '45b963397aa40d4a0063e0d85e4fe7a1', 23, 'files', 2, 1, 2486024, 1430076363, 1430076363, 0, 0, '553d3bcbde154', 31),
(26, 4, 'files/Documents', '0ad78ba05b6961d92f7970b2b3922eca', 25, 'Documents', 2, 1, 36227, 1430076363, 1430076363, 0, 0, '553d3bcbab283', 31),
(27, 4, 'files/Documents/Example.odt', 'c89c560541b952a435783a7d51a27d50', 26, 'Example.odt', 4, 3, 36227, 1430076363, 1430076363, 0, 0, '4fb722efd8a68743cfddc2a62151004c', 27),
(28, 4, 'files/Photos', 'd01bb67e7b71dd49fd06bad922f521c9', 25, 'Photos', 2, 1, 678556, 1430076363, 1430076363, 0, 0, '553d3bcbd455a', 31),
(29, 4, 'files/Photos/Squirrel.jpg', 'de85d1da71bcd6232ad893f959063b8c', 28, 'Squirrel.jpg', 6, 5, 233724, 1430076363, 1430076363, 0, 0, 'e32cac50e3d019dd202802af67f21e7a', 27),
(30, 4, 'files/Photos/Paris.jpg', 'a208ddedf08367bbc56963107248dda5', 28, 'Paris.jpg', 6, 5, 228761, 1430076363, 1430076363, 0, 0, '5a396931a41c38eea20c90d56b806dac', 27),
(31, 4, 'files/Photos/San Francisco.jpg', '9fc714efbeaafee22f7058e73d2b1c3b', 28, 'San Francisco.jpg', 6, 5, 216071, 1430076363, 1430076363, 0, 0, '6138bac545662b1718f8971cdac8bffb', 27),
(32, 4, 'files/ownCloudUserManual.pdf', 'c8edba2d1b8eb651c107b43532c34445', 25, 'ownCloudUserManual.pdf', 7, 3, 1771241, 1430076363, 1430076363, 0, 0, 'eb367888b6a8e8e5e222e99cdc59efaf', 27);
-- --------------------------------------------------------
--
-- Tabellenstruktur für Tabelle `oc_files_trash`
--
DROP TABLE IF EXISTS `oc8_files_trash`;
CREATE TABLE IF NOT EXISTS `oc8_files_trash` (
`auto_id` int(11) NOT NULL,
`id` varchar(250) COLLATE utf8_bin NOT NULL DEFAULT '',
`user` varchar(64) COLLATE utf8_bin NOT NULL DEFAULT '',
`timestamp` varchar(12) COLLATE utf8_bin NOT NULL DEFAULT '',
`location` varchar(512) COLLATE utf8_bin NOT NULL DEFAULT '',
`type` varchar(4) COLLATE utf8_bin DEFAULT NULL,
`mime` varchar(255) COLLATE utf8_bin DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
-- --------------------------------------------------------
--
-- Tabellenstruktur für Tabelle `oc_file_map`
--
DROP TABLE IF EXISTS `oc8_file_map`;
CREATE TABLE IF NOT EXISTS `oc8_file_map` (
`logic_path` varchar(512) COLLATE utf8_bin NOT NULL DEFAULT '',
`logic_path_hash` varchar(32) COLLATE utf8_bin NOT NULL DEFAULT '',
`physic_path` varchar(512) COLLATE utf8_bin NOT NULL DEFAULT '',
`physic_path_hash` varchar(32) COLLATE utf8_bin NOT NULL DEFAULT ''
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
-- --------------------------------------------------------
--
-- Tabellenstruktur für Tabelle `oc_groups`
--
DROP TABLE IF EXISTS `oc8_groups`;
CREATE TABLE IF NOT EXISTS `oc8_groups` (
`gid` varchar(64) COLLATE utf8_bin NOT NULL DEFAULT ''
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
--
-- Daten für Tabelle `oc_groups`
--
INSERT INTO `oc8_groups` (`gid`) VALUES
('admin');
-- --------------------------------------------------------
--
-- Tabellenstruktur für Tabelle `oc_group_admin`
--
DROP TABLE IF EXISTS `oc8_group_admin`;
CREATE TABLE IF NOT EXISTS `oc8_group_admin` (
`gid` varchar(64) COLLATE utf8_bin NOT NULL DEFAULT '',
`uid` varchar(64) COLLATE utf8_bin NOT NULL DEFAULT ''
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
-- --------------------------------------------------------
--
-- Tabellenstruktur für Tabelle `oc_group_user`
--
DROP TABLE IF EXISTS `oc8_group_user`;
CREATE TABLE IF NOT EXISTS `oc8_group_user` (
`gid` varchar(64) COLLATE utf8_bin NOT NULL DEFAULT '',
`uid` varchar(64) COLLATE utf8_bin NOT NULL DEFAULT ''
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
--
-- Daten für Tabelle `oc_group_user`
--
INSERT INTO `oc8_group_user` (`gid`, `uid`) VALUES
('admin', 'admin');
-- --------------------------------------------------------
--
-- Tabellenstruktur für Tabelle `oc_jobs`
--
DROP TABLE IF EXISTS `oc8_jobs`;
CREATE TABLE IF NOT EXISTS `oc8_jobs` (
`id` int(10) unsigned NOT NULL,
`class` varchar(255) COLLATE utf8_bin NOT NULL DEFAULT '',
`argument` varchar(256) COLLATE utf8_bin NOT NULL DEFAULT '',
`last_run` int(11) DEFAULT '0'
) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
--
-- Daten für Tabelle `oc_jobs`
--
INSERT INTO `oc8_jobs` (`id`, `class`, `argument`, `last_run`) VALUES
(1, 'OC\\Cache\\FileGlobalGC', 'null', 1430076364),
(2, 'OCA\\Activity\\BackgroundJob\\EmailNotification', 'null', 1430075888),
(3, 'OCA\\Activity\\BackgroundJob\\ExpireActivities', 'null', 1430075927);
-- --------------------------------------------------------
--
-- Tabellenstruktur für Tabelle `oc_locks`
--
DROP TABLE IF EXISTS `oc8_locks`;
CREATE TABLE IF NOT EXISTS `oc8_locks` (
`id` int(10) unsigned NOT NULL,
`userid` varchar(64) COLLATE utf8_bin DEFAULT NULL,
`owner` varchar(100) COLLATE utf8_bin DEFAULT NULL,
`timeout` int(10) unsigned DEFAULT NULL,
`created` bigint(20) DEFAULT NULL,
`token` varchar(100) COLLATE utf8_bin DEFAULT NULL,
`scope` smallint(6) DEFAULT NULL,
`depth` smallint(6) DEFAULT NULL,
`uri` longtext COLLATE utf8_bin
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
-- --------------------------------------------------------
--
-- Tabellenstruktur für Tabelle `oc_mimetypes`
--
DROP TABLE IF EXISTS `oc8_mimetypes`;
CREATE TABLE IF NOT EXISTS `oc8_mimetypes` (
`id` int(11) NOT NULL,
`mimetype` varchar(255) COLLATE utf8_bin NOT NULL DEFAULT ''
) ENGINE=InnoDB AUTO_INCREMENT=10 DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
--
-- Daten für Tabelle `oc_mimetypes`
--
INSERT INTO `oc8_mimetypes` (`id`, `mimetype`) VALUES
(3, 'application'),
(9, 'application/octet-stream'),
(7, 'application/pdf'),
(4, 'application/vnd.oasis.opendocument.text'),
(8, 'application/zip'),
(1, 'httpd'),
(2, 'httpd/unix-directory'),
(5, 'image'),
(6, 'image/jpeg');
-- --------------------------------------------------------
--
-- Tabellenstruktur für Tabelle `oc_preferences`
--
DROP TABLE IF EXISTS `oc8_preferences`;
CREATE TABLE IF NOT EXISTS `oc8_preferences` (
`userid` varchar(64) COLLATE utf8_bin NOT NULL DEFAULT '',
`appid` varchar(32) COLLATE utf8_bin NOT NULL DEFAULT '',
`configkey` varchar(64) COLLATE utf8_bin NOT NULL DEFAULT '',
`configvalue` longtext COLLATE utf8_bin
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
--
-- Daten für Tabelle `oc_preferences`
--
INSERT INTO `oc8_preferences` (`userid`, `appid`, `configkey`, `configvalue`) VALUES
('admin', 'core', 'timezone', 'Europe/Berlin'),
('admin', 'firstrunwizard', 'show', '0'),
('admin', 'login', 'lastLogin', '1430076256'),
('<EMAIL>', 'core', 'timezone', 'Europe/Berlin'),
('<EMAIL>', 'firstrunwizard', 'show', '0'),
('<EMAIL>', 'login', 'lastLogin', '1430076363'),
('<EMAIL>', 'core', 'timezone', 'Europe/Berlin'),
('<EMAIL>', 'firstrunwizard', 'show', '0'),
('<EMAIL>', 'login', 'lastLogin', '1430076356'),
('<EMAIL>', 'roundcube', 'privateSSLKey', '-----<KEY>'),
('<EMAIL>', 'roundcube', 'publicSSLKey', '-----<KEY>');
-- --------------------------------------------------------
--
-- Tabellenstruktur für Tabelle `oc_privatedata`
--
DROP TABLE IF EXISTS `oc8_privatedata`;
CREATE TABLE IF NOT EXISTS `oc8_privatedata` (
`keyid` int(10) unsigned NOT NULL,
`user` varchar(64) COLLATE utf8_bin NOT NULL DEFAULT '',
`app` varchar(255) COLLATE utf8_bin NOT NULL DEFAULT '',
`key` varchar(255) COLLATE utf8_bin NOT NULL DEFAULT '',
`value` varchar(255) COLLATE utf8_bin NOT NULL DEFAULT ''
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
-- --------------------------------------------------------
--
-- Tabellenstruktur für Tabelle `oc_properties`
--
DROP TABLE IF EXISTS `oc8_properties`;
CREATE TABLE IF NOT EXISTS `oc8_properties` (
`id` int(11) NOT NULL,
`userid` varchar(64) COLLATE utf8_bin NOT NULL DEFAULT '',
`propertypath` varchar(255) COLLATE utf8_bin NOT NULL DEFAULT '',
`propertyname` varchar(255) COLLATE utf8_bin NOT NULL DEFAULT '',
`propertyvalue` varchar(255) COLLATE utf8_bin NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
-- --------------------------------------------------------
--
-- Tabellenstruktur für Tabelle `oc_roundcube`
--
DROP TABLE IF EXISTS `oc8_roundcube`;
CREATE TABLE IF NOT EXISTS `oc8_roundcube` (
`id` bigint(20) NOT NULL,
`oc_user` varchar(4096) COLLATE utf8_bin NOT NULL DEFAULT '',
`mail_user` varchar(4096) COLLATE utf8_bin NOT NULL DEFAULT '',
`mail_password` varchar(4096) COLLATE utf8_bin NOT NULL DEFAULT ''
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
--
-- Daten für Tabelle `oc_roundcube`
--
INSERT INTO `oc8_roundcube` (`id`, `oc_user`, `mail_user`, `mail_password`) VALUES
(1, '<EMAIL>', '', '');
-- --------------------------------------------------------
--
-- Table structure for table `oc_share`
--
-- ownCloud sharing table: one row per share of an item (file, folder, ...).
-- `parent` presumably references another share id for re-shares; `token`
-- is used for public link shares — both assumptions to be confirmed
-- against the ownCloud sharing code, not visible here.
DROP TABLE IF EXISTS `oc8_share`;
CREATE TABLE IF NOT EXISTS `oc8_share` (
`id` int(11) NOT NULL,
`share_type` smallint(6) NOT NULL DEFAULT '0',
`share_with` varchar(255) COLLATE utf8_bin DEFAULT NULL,
`uid_owner` varchar(64) COLLATE utf8_bin NOT NULL DEFAULT '',
`parent` int(11) DEFAULT NULL,
`item_type` varchar(64) COLLATE utf8_bin NOT NULL DEFAULT '',
`item_source` varchar(255) COLLATE utf8_bin DEFAULT NULL,
`item_target` varchar(255) COLLATE utf8_bin DEFAULT NULL,
`file_source` int(11) DEFAULT NULL,
`file_target` varchar(512) COLLATE utf8_bin DEFAULT NULL,
`permissions` smallint(6) NOT NULL DEFAULT '0',
`stime` bigint(20) NOT NULL DEFAULT '0',
`accepted` smallint(6) NOT NULL DEFAULT '0',
`expiration` datetime DEFAULT NULL,
`token` varchar(32) COLLATE utf8_bin DEFAULT NULL,
`mail_send` smallint(6) NOT NULL DEFAULT '0'
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
-- --------------------------------------------------------
--
-- Tabellenstruktur für Tabelle `oc_share_external`
--
DROP TABLE IF EXISTS `oc8_share_external`;
CREATE TABLE IF NOT EXISTS `oc8_share_external` (
`id` int(11) NOT NULL,
`remote` varchar(512) COLLATE utf8_bin NOT NULL COMMENT 'Url of the remove owncloud instance',
`remote_id` int(11) NOT NULL,
`share_token` varchar(64) COLLATE utf8_bin NOT NULL COMMENT 'Public share token',
`password` varchar(64) COLLATE utf8_bin DEFAULT NULL COMMENT 'Optional password for the public share',
`name` varchar(64) COLLATE utf8_bin NOT NULL COMMENT 'Original name on the remote server',
`owner` varchar(64) COLLATE utf8_bin NOT NULL COMMENT 'User that owns the public share on the remote server',
`user` varchar(64) COLLATE utf8_bin NOT NULL COMMENT 'Local user which added the external share',
`mountpoint` varchar(4000) COLLATE utf8_bin NOT NULL COMMENT 'Full path where the share is mounted',
`mountpoint_hash` varchar(32) COLLATE utf8_bin NOT NULL COMMENT 'md5 hash of the mountpoint',
`accepted` int(11) NOT NULL DEFAULT '0'
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
-- --------------------------------------------------------
--
-- Tabellenstruktur für Tabelle `oc_storagecharts2`
--
DROP TABLE IF EXISTS `oc8_storagecharts2`;
CREATE TABLE IF NOT EXISTS `oc8_storagecharts2` (
`stc_id` int(10) unsigned NOT NULL,
`oc_uid` varchar(64) COLLATE utf8_bin NOT NULL,
`stc_month` bigint(20) NOT NULL,
`stc_dayts` bigint(20) NOT NULL,
`stc_used` double NOT NULL,
`stc_total` double NOT NULL
) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
--
-- Daten für Tabelle `oc_storagecharts2`
--
INSERT INTO `oc8_storagecharts2` (`stc_id`, `oc_uid`, `stc_month`, `stc_dayts`, `stc_used`, `stc_total`) VALUES
(1, 'admin', 201504, 1430006400, 754678215, 16343579079),
(2, '<EMAIL>', 201504, 1430006400, 754701244, 16343495612),
(3, '<EMAIL>', 201504, 1430006400, 757211208, 16343269448);
-- --------------------------------------------------------
--
-- Table structure for table `oc_storagecharts2_uconf`
-- (per-user key/value settings for the storage charts app)
--
DROP TABLE IF EXISTS `oc8_storagecharts2_uconf`;
CREATE TABLE IF NOT EXISTS `oc8_storagecharts2_uconf` (
`uc_id` int(10) unsigned NOT NULL,
`oc_uid` varchar(64) COLLATE utf8_bin NOT NULL,
`uc_key` varchar(64) COLLATE utf8_bin NOT NULL,
`uc_val` varchar(255) COLLATE utf8_bin NOT NULL
) ENGINE=InnoDB AUTO_INCREMENT=5 DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
--
-- Data for table `oc_storagecharts2_uconf`
--
INSERT INTO `oc8_storagecharts2_uconf` (`uc_id`, `oc_uid`, `uc_key`, `uc_val`) VALUES
(1, 'admin', 'hu_size', '3'),
(2, 'admin', 'hu_size_hus', '3'),
(3, '<EMAIL>', 'hu_size', '3'),
(4, '<EMAIL>', 'hu_size_hus', '3');
-- --------------------------------------------------------
--
-- Table structure for table `oc_storages`
-- (registered storage backends; `id` is the storage URI, `numeric_id`
-- the surrogate key -- presumably what oc_filecache.storage refers to)
--
DROP TABLE IF EXISTS `oc8_storages`;
CREATE TABLE IF NOT EXISTS `oc8_storages` (
`id` varchar(64) COLLATE utf8_bin DEFAULT NULL,
`numeric_id` int(11) NOT NULL
) ENGINE=InnoDB AUTO_INCREMENT=5 DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
--
-- Data for table `oc_storages`
--
INSERT INTO `oc8_storages` (`id`, `numeric_id`) VALUES
('home::admin', 1),
('home::<EMAIL>', 4),
('home::<EMAIL>', 3),
('local::/var/www/owncloud/data/', 2);
-- --------------------------------------------------------
--
-- Table structure for table `oc_users`
--
DROP TABLE IF EXISTS `oc8_users`;
CREATE TABLE IF NOT EXISTS `oc8_users` (
`uid` varchar(64) COLLATE utf8_bin NOT NULL DEFAULT '',
`displayname` varchar(64) COLLATE utf8_bin DEFAULT NULL,
`password` varchar(255) COLLATE utf8_bin NOT NULL DEFAULT ''
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
--
-- Data for table `oc_users`
--
-- password values look like '<version>|<bcrypt hash>' -- TODO confirm
-- against ownCloud's password hasher; these are hashes, not plaintext
INSERT INTO `oc8_users` (`uid`, `displayname`, `password`) VALUES
('admin', NULL, '1|$2y$10$YYUoykjZyfqV3Azh8ADxTOWvz96SUbcVyKuMTBfkDpcndxLqhAWdC'),
('<EMAIL>', NULL, '1|$2y$10$c4VIk8.yJ80rLoSJIchcMeb7TZxrcz0V/N3WctFeXUoZ4uKYmyMU2'),
('<EMAIL>', NULL, '1|$2y$10$D4vIIck/oLwDprW3z/8b..ikpw0xVouIoqpxhS4YZqpG0tChwJnCW');
-- --------------------------------------------------------
--
-- Table structure for table `oc_vcategory`
-- (user-defined tags/categories, scoped per user and object type)
--
DROP TABLE IF EXISTS `oc8_vcategory`;
CREATE TABLE IF NOT EXISTS `oc8_vcategory` (
`id` int(10) unsigned NOT NULL,
`uid` varchar(64) COLLATE utf8_bin NOT NULL DEFAULT '',
`type` varchar(64) COLLATE utf8_bin NOT NULL DEFAULT '',
`category` varchar(255) COLLATE utf8_bin NOT NULL DEFAULT ''
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
-- --------------------------------------------------------
--
-- Table structure for table `oc_vcategory_to_object`
-- (junction table: assigns a category to an object of the given type)
--
DROP TABLE IF EXISTS `oc8_vcategory_to_object`;
CREATE TABLE IF NOT EXISTS `oc8_vcategory_to_object` (
`objid` int(10) unsigned NOT NULL DEFAULT '0',
`categoryid` int(10) unsigned NOT NULL DEFAULT '0',
`type` varchar(64) COLLATE utf8_bin NOT NULL DEFAULT ''
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
--
-- Indexes for the exported tables
--
-- NOTE: phpMyAdmin splits index creation out of CREATE TABLE; each
-- ALTER below adds the primary key plus secondary keys in one statement.
--
-- Indexes for table `oc_activity`
--
ALTER TABLE `oc8_activity`
ADD PRIMARY KEY (`activity_id`), ADD KEY `activity_user_time` (`affecteduser`,`timestamp`), ADD KEY `activity_filter_by` (`affecteduser`,`user`,`timestamp`), ADD KEY `activity_filter_app` (`affecteduser`,`app`,`timestamp`);
--
-- Indexes for table `oc_activity_mq`
--
ALTER TABLE `oc8_activity_mq`
ADD PRIMARY KEY (`mail_id`), ADD KEY `amp_user` (`amq_affecteduser`), ADD KEY `amp_latest_send_time` (`amq_latest_send`), ADD KEY `amp_timestamp_time` (`amq_timestamp`);
--
-- Indexes for table `oc_appconfig`
--
ALTER TABLE `oc8_appconfig`
ADD PRIMARY KEY (`appid`,`configkey`), ADD KEY `appconfig_config_key_index` (`configkey`), ADD KEY `appconfig_appid_key` (`appid`);
--
-- Indexes for table `oc_filecache`
--
ALTER TABLE `oc8_filecache`
ADD PRIMARY KEY (`fileid`), ADD UNIQUE KEY `fs_storage_path_hash` (`storage`,`path_hash`), ADD KEY `fs_parent_name_hash` (`parent`,`name`), ADD KEY `fs_storage_mimetype` (`storage`,`mimetype`), ADD KEY `fs_storage_mimepart` (`storage`,`mimepart`), ADD KEY `fs_storage_size` (`storage`,`size`,`fileid`);
--
-- Indexes for table `oc_files_trash`
--
ALTER TABLE `oc8_files_trash`
ADD PRIMARY KEY (`auto_id`), ADD KEY `id_index` (`id`), ADD KEY `timestamp_index` (`timestamp`), ADD KEY `user_index` (`user`);
--
-- Indexes for table `oc_file_map`
--
ALTER TABLE `oc8_file_map`
ADD PRIMARY KEY (`logic_path_hash`), ADD UNIQUE KEY `file_map_pp_index` (`physic_path_hash`);
--
-- Indexes for table `oc_groups`
--
ALTER TABLE `oc8_groups`
ADD PRIMARY KEY (`gid`);
--
-- Indexes for table `oc_group_admin`
--
ALTER TABLE `oc8_group_admin`
ADD PRIMARY KEY (`gid`,`uid`), ADD KEY `group_admin_uid` (`uid`);
--
-- Indexes for table `oc_group_user`
--
ALTER TABLE `oc8_group_user`
ADD PRIMARY KEY (`gid`,`uid`);
--
-- Indexes for table `oc_jobs`
--
ALTER TABLE `oc8_jobs`
ADD PRIMARY KEY (`id`), ADD KEY `job_class_index` (`class`);
--
-- Indexes for table `oc_locks`
--
ALTER TABLE `oc8_locks`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `oc_mimetypes`
--
ALTER TABLE `oc8_mimetypes`
ADD PRIMARY KEY (`id`), ADD UNIQUE KEY `mimetype_id_index` (`mimetype`);
--
-- Indexes for table `oc_preferences`
--
ALTER TABLE `oc8_preferences`
ADD PRIMARY KEY (`userid`,`appid`,`configkey`);
--
-- Indexes for table `oc_privatedata`
--
ALTER TABLE `oc8_privatedata`
ADD PRIMARY KEY (`keyid`);
--
-- Indexes for table `oc_properties`
--
ALTER TABLE `oc8_properties`
ADD PRIMARY KEY (`id`), ADD KEY `property_index` (`userid`);
--
-- Indexes for table `oc_roundcube`
--
ALTER TABLE `oc8_roundcube`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `oc_share`
--
ALTER TABLE `oc8_share`
ADD PRIMARY KEY (`id`), ADD KEY `item_share_type_index` (`item_type`,`share_type`), ADD KEY `file_source_index` (`file_source`), ADD KEY `token_index` (`token`);
--
-- Indexes for table `oc_share_external`
--
ALTER TABLE `oc8_share_external`
ADD PRIMARY KEY (`id`), ADD UNIQUE KEY `sh_external_mp` (`user`,`mountpoint_hash`), ADD KEY `sh_external_user` (`user`);
--
-- Indexes for table `oc_storagecharts2`
--
ALTER TABLE `oc8_storagecharts2`
ADD PRIMARY KEY (`stc_id`);
--
-- Indexes for table `oc_storagecharts2_uconf`
--
ALTER TABLE `oc8_storagecharts2_uconf`
ADD PRIMARY KEY (`uc_id`);
--
-- Indexes for table `oc_storages`
--
ALTER TABLE `oc8_storages`
ADD PRIMARY KEY (`numeric_id`), ADD UNIQUE KEY `storages_id_index` (`id`);
--
-- Indexes for table `oc_users`
--
ALTER TABLE `oc8_users`
ADD PRIMARY KEY (`uid`);
--
-- Indexes for table `oc_vcategory`
--
ALTER TABLE `oc8_vcategory`
ADD PRIMARY KEY (`id`), ADD KEY `uid_index` (`uid`), ADD KEY `type_index` (`type`), ADD KEY `category_index` (`category`);
--
-- Indexes for table `oc_vcategory_to_object`
--
ALTER TABLE `oc8_vcategory_to_object`
ADD PRIMARY KEY (`categoryid`,`objid`,`type`), ADD KEY `vcategory_objectd_index` (`objid`,`type`);
--
-- AUTO_INCREMENT for the exported tables
--
-- NOTE: ",AUTO_INCREMENT=n" after each column change is a table option
-- restoring the next auto-increment value (valid MySQL syntax despite
-- the missing space after the comma).
--
-- AUTO_INCREMENT for table `oc_activity`
--
ALTER TABLE `oc8_activity`
MODIFY `activity_id` int(11) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=3;
--
-- AUTO_INCREMENT for table `oc_activity_mq`
--
ALTER TABLE `oc8_activity_mq`
MODIFY `mail_id` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `oc_filecache`
--
ALTER TABLE `oc8_filecache`
MODIFY `fileid` int(11) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=33;
--
-- AUTO_INCREMENT for table `oc_files_trash`
--
ALTER TABLE `oc8_files_trash`
MODIFY `auto_id` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `oc_jobs`
--
ALTER TABLE `oc8_jobs`
MODIFY `id` int(10) unsigned NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=4;
--
-- AUTO_INCREMENT for table `oc_locks`
--
ALTER TABLE `oc8_locks`
MODIFY `id` int(10) unsigned NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `oc_mimetypes`
--
ALTER TABLE `oc8_mimetypes`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=10;
--
-- AUTO_INCREMENT for table `oc_privatedata`
--
ALTER TABLE `oc8_privatedata`
MODIFY `keyid` int(10) unsigned NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `oc_properties`
--
ALTER TABLE `oc8_properties`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `oc_roundcube`
--
ALTER TABLE `oc8_roundcube`
MODIFY `id` bigint(20) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=2;
--
-- AUTO_INCREMENT for table `oc_share`
--
ALTER TABLE `oc8_share`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `oc_share_external`
--
ALTER TABLE `oc8_share_external`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `oc_storagecharts2`
--
ALTER TABLE `oc8_storagecharts2`
MODIFY `stc_id` int(10) unsigned NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=4;
--
-- AUTO_INCREMENT for table `oc_storagecharts2_uconf`
--
ALTER TABLE `oc8_storagecharts2_uconf`
MODIFY `uc_id` int(10) unsigned NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=5;
--
-- AUTO_INCREMENT for table `oc_storages`
--
ALTER TABLE `oc8_storages`
MODIFY `numeric_id` int(11) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=5;
--
-- AUTO_INCREMENT for table `oc_vcategory`
--
ALTER TABLE `oc8_vcategory`
MODIFY `id` int(10) unsigned NOT NULL AUTO_INCREMENT;
-- restore the client character set / collation saved at the top of the dump
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
|
-- Test schema: test_a is the root entity with a one-to-one child
-- (test_a_has_one), a one-to-many child (test_a_has_many), and test_b
-- referencing test_c; keys/constraints are added further below.
CREATE TABLE `test_a` (
`id` int(11) NOT NULL,
`propA` varchar(190) COLLATE utf8mb4_unicode_ci NOT NULL,
`createdAt` datetime NOT NULL,
`modifiedAt` datetime NOT NULL,
`deletedAt` datetime DEFAULT NULL -- presumably a soft-delete marker (NULL = live row)
) ENGINE=InnoDB;
-- --------------------------------------------------------
--
-- Table structure for table `test_a_has_many`
--
CREATE TABLE `test_a_has_many` (
`id` int(11) NOT NULL,
`aId` int(11) NOT NULL,
`propOfHasManyA` varchar(190) COLLATE utf8mb4_unicode_ci NOT NULL
) ENGINE=InnoDB;
-- --------------------------------------------------------
--
-- Table structure for table `test_a_has_one`
--
CREATE TABLE `test_a_has_one` (
`id` int(11) NOT NULL,
`aId` int(11) NOT NULL,
`propA` varchar(190) COLLATE utf8mb4_unicode_ci NOT NULL
) ENGINE=InnoDB;
-- --------------------------------------------------------
--
-- Table structure for table `test_b`
--
CREATE TABLE `test_b` (
`id` int(11) NOT NULL,
`propB` varchar(190) COLLATE utf8mb4_unicode_ci NOT NULL,
`cId` int(11) DEFAULT NULL, -- nullable: FK below uses ON DELETE SET NULL
`userId` int(11) NOT NULL
) ENGINE=InnoDB;
-- --------------------------------------------------------
--
-- Table structure for table `test_c`
--
CREATE TABLE `test_c` (
`id` int(11) NOT NULL,
`name` varchar(190) COLLATE utf8mb4_unicode_ci NOT NULL
) ENGINE=InnoDB;
--
-- Indexes for dumped tables
--
--
-- Indexes for table `test_a`
--
ALTER TABLE `test_a`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `test_a_has_many`
--
ALTER TABLE `test_a_has_many`
ADD PRIMARY KEY (`id`,`aId`),
ADD KEY `aId` (`aId`);
--
-- Indexes for table `test_a_has_one`
--
-- NOTE(review): PK is (id, aId), so `aId` alone is not unique here;
-- the "has one" cardinality is not enforced by the schema -- confirm
-- the application guarantees one row per aId.
ALTER TABLE `test_a_has_one`
ADD PRIMARY KEY (`id`,`aId`),
ADD KEY `aId` (`aId`);
--
-- Indexes for table `test_b`
--
ALTER TABLE `test_b`
ADD PRIMARY KEY (`id`),
ADD KEY `cId` (`cId`),
ADD KEY `userId` (`userId`);
--
-- Indexes for table `test_c`
--
ALTER TABLE `test_c`
ADD PRIMARY KEY (`id`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `test_a`
--
ALTER TABLE `test_a`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=29;
--
-- AUTO_INCREMENT for table `test_a_has_many`
--
ALTER TABLE `test_a_has_many`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=68;
--
-- AUTO_INCREMENT for table `test_a_has_one`
--
ALTER TABLE `test_a_has_one`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `test_c`
--
-- NOTE: test_b deliberately has no AUTO_INCREMENT; its id is shared
-- with test_a via the FK below (one-to-one, shared primary key).
ALTER TABLE `test_c`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=88;
--
-- Constraints for dumped tables
--
--
-- Constraints for table `test_a_has_many`
--
ALTER TABLE `test_a_has_many`
ADD CONSTRAINT `test_a_has_many_ibfk_1` FOREIGN KEY (`aId`) REFERENCES `test_a` (`id`) ON DELETE CASCADE;
--
-- Constraints for table `test_a_has_one`
--
ALTER TABLE `test_a_has_one`
ADD CONSTRAINT `test_a_has_one_ibfk_1` FOREIGN KEY (`aId`) REFERENCES `test_a` (`id`) ON DELETE CASCADE;
--
-- Constraints for table `test_b`
--
-- test_b_ibfk_1 makes test_b.id a FK to test_a.id (shared-PK one-to-one);
-- test_b_ibfk_3 references `core_user`, which must exist elsewhere in
-- the database before this script runs.
ALTER TABLE `test_b`
ADD CONSTRAINT `test_b_ibfk_1` FOREIGN KEY (`id`) REFERENCES `test_a` (`id`) ON DELETE CASCADE,
ADD CONSTRAINT `test_b_ibfk_2` FOREIGN KEY (`cId`) REFERENCES `test_c` (`id`) ON DELETE SET NULL,
ADD CONSTRAINT `test_b_ibfk_3` FOREIGN KEY (`userId`) REFERENCES `core_user` (`id`) ON DELETE CASCADE;
-- test_d: standalone table with no relations
CREATE TABLE `test_d` (
`id` int(11) NOT NULL,
`propD` varchar(100) COLLATE utf8mb4_unicode_ci DEFAULT NULL
) ENGINE=InnoDB;
--
-- Indexes for dumped tables
--
--
-- Indexes for table `test_d`
--
ALTER TABLE `test_d`
ADD PRIMARY KEY (`id`);
|
--
-- Name: test_get_data_single(integer, text, refcursor, text); Type: PROCEDURE; Schema: public; Owner: d3l243
--
-- Demo procedure: sets _message from the given item id and opens the
-- INOUT refcursor _result_one over two hard-coded rows. The caller must
-- FETCH from the cursor within the same transaction.
CREATE OR REPLACE PROCEDURE public.test_get_data_single(_itemid integer, INOUT _message text DEFAULT ''::text, INOUT _result_one refcursor DEFAULT 'rs_resultone'::refcursor, INOUT _returncode text DEFAULT ''::text)
LANGUAGE plpgsql
AS $$
BEGIN
-- _itemID / _returnCode match _itemid / _returncode: PL/pgSQL
-- identifiers are case-insensitive
_message := 'Test message for item ' || COALESCE(_itemID, 0);
_returnCode := '';
-- two fixed rows; column e differs by 5 seconds so callers can
-- exercise timestamp handling
open _result_one for
SELECT *
FROM (values (1,2,3, 'fruit', current_timestamp - INTERVAL '5 seconds'),
(4,5,6, 'veggie', current_timestamp)) as t(a,b,c,d,e);
END;
$$;
ALTER PROCEDURE public.test_get_data_single(_itemid integer, INOUT _message text, INOUT _result_one refcursor, INOUT _returncode text) OWNER TO d3l243;
|
<gh_stars>0
-- @testpoint: openGauss non-reserved keyword "dictionary" used as a schema name
-- unquoted keyword - expected to succeed
drop schema if exists dictionary;
create schema dictionary;
drop schema dictionary;
-- keyword in double quotes - expected to succeed
drop schema if exists "dictionary";
create schema "dictionary";
drop schema "dictionary";
-- keyword in single quotes - intentionally invalid, expects a clean error
drop schema if exists 'dictionary';
create schema 'dictionary';
-- keyword in backticks - intentionally invalid, expects a clean error
drop schema if exists `dictionary`;
create schema `dictionary`;
|
-- Articles authored by users; `slug` is the unique URL key and
-- `article_id` a secondary sequential number alongside the UUID PK.
-- NOTE(review): uuid_generate_v4() requires the uuid-ossp extension --
-- confirm CREATE EXTENSION runs before this script. updated_at is not
-- auto-maintained; presumably updated by the application or a trigger.
CREATE TABLE IF NOT EXISTS articles (
id UUID DEFAULT uuid_generate_v4() PRIMARY KEY,
article_id SERIAL,
slug VARCHAR(255) UNIQUE NOT NULL,
title VARCHAR(255) NOT NULL,
description TEXT NOT NULL,
body TEXT NOT NULL,
created_at TIMESTAMP DEFAULT NOW() NOT NULL,
updated_at TIMESTAMP DEFAULT NOW() NOT NULL,
author_id UUID NOT NULL,
CONSTRAINT fk_articles_author
FOREIGN KEY (author_id)
REFERENCES users(id)
ON DELETE CASCADE
);
|
<reponame>rubytomato/docker-java-develop<gh_stars>0
USE sample_db;
-- Seed data for the memo table, wrapped in one transaction so the load
-- is all-or-nothing.
START TRANSACTION;
INSERT INTO memo (title, description, done, updated) VALUES ('Memo A', 'aISms0a02jsy47xk4kao28FlqUqnwl', false, '2017-10-01');
INSERT INTO memo (title, description, done, updated) VALUES ('Memo B', 'i5hxoG8rm29quqububMr9gu1OQia75', false, '2017-10-02');
INSERT INTO memo (title, description, done, updated) VALUES ('Memo C', '5L18coGlrHao3yz9xur8c9vpDhfu6o', true, '2017-10-02');
INSERT INTO memo (title, description, done, updated) VALUES ('Memo D', 'Mw8Xu1itnr8fkdn4lbpWJfpe91mg71', false, '2017-10-03');
INSERT INTO memo (title, description, done, updated) VALUES ('Memo E', 'Xl6ngkzic8w05orn6hc7b82hthXJrK', true, '2017-10-03');
INSERT INTO memo (title, description, done, updated) VALUES ('Memo F', '5kspemtnWUQ94lemehf0f0aM482iqo', false, '2017-10-03');
INSERT INTO memo (title, description, done, updated) VALUES ('Memo G', 'lB82Lq8riepbo395UejquBnbu40syd', false, '2017-10-04');
INSERT INTO memo (title, description, done, updated) VALUES ('Memo H', 'ptye0qPrig7tyZh59Sut74Lqnwk4j3', true, '2017-10-05');
INSERT INTO memo (title, description, done, updated) VALUES ('Memo I', '9d7sien1mgyxu37gI6nqHfhFmx95iV', true, '2017-10-06');
INSERT INTO memo (title, description, done, updated) VALUES ('Memo J', 'zPwyxLEp50am18quX0d7bnajeo1k2j', true, '2017-10-06');
COMMIT;
|
<filename>db_point_of_sale.sql<gh_stars>0
-- phpMyAdmin SQL Dump
-- version 4.9.0.1
-- https://www.phpmyadmin.net/
--
-- Host: 127.0.0.1
-- Generation time: 18 Jul 2020 at 10.39
-- Server version: 10.3.16-MariaDB
-- PHP version: 7.3.7
-- Standard phpMyAdmin preamble: keep explicit zero ids on insert, load
-- the whole dump in one transaction, use a fixed UTC session time zone.
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";
-- save the client charset/collation; restored at the end of the dump
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `db_point_of_sale`
--
-- --------------------------------------------------------
--
-- Table structure for table `mt_category`
-- (menu categories, e.g. food/drink)
--
CREATE TABLE `mt_category` (
`id_kategori` int(11) NOT NULL,
`kategori` varchar(20) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `mt_category`
--
INSERT INTO `mt_category` (`id_kategori`, `kategori`) VALUES
(1, 'Makanan'),
(2, 'Minuman'),
(8, 'Camilan');
-- --------------------------------------------------------
--
-- Table structure for table `mt_items`
-- (menu items; id_item is an app-generated 'MENU-...' code,
-- status_item presumably flags availability -- TODO confirm)
--
CREATE TABLE `mt_items` (
`id_item` char(15) NOT NULL,
`nama_item` varchar(20) NOT NULL,
`id_kategori` int(11) NOT NULL,
`harga_item` float NOT NULL,
`status_item` int(11) NOT NULL,
`deskripsi_item` text NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `mt_items`
--
-- NOTE(review): harga_item (price) is FLOAT; DECIMAL would be the safer
-- type for money, but changing it here would alter the dumped schema.
INSERT INTO `mt_items` (`id_item`, `nama_item`, `id_kategori`, `harga_item`, `status_item`, `deskripsi_item`) VALUES
('MENU-05105410', '<NAME>', 1, 12000, 1, 'Ikan peda yang dipepes dengan daun pisang dan dibumbui dengan olahan khas Sukabumi'),
('MENU-08100914', 'Es Jeruk', 2, 7000, 1, 'Minuman dengan jeruk peras alami '),
('MENU-08100916', 'Es Teh Manis', 2, 5000, 1, 'Teh manis dingin dengan kenikmatan yang haqiqi'),
('MENU-08105612', 'Ikan Asin Jambal', 1, 7000, 1, 'Ikan Asin Jambal yang Mantap'),
('MENU-11075972', 'Ayam Bakar', 1, 20000, 1, 'Test'),
('MENU-1308122', 'Sop Iga', 1, 30000, 1, 'Sop Nigga Sapi'),
('MENU-1308401', 'Ayam Goreng', 1, 15000, 1, 'Ayam Goreng Spesial Dapur Sunda Bu Yuyu'),
('MENU-16100056', '<NAME>', 1, 20000, 1, 'Pepes ayam khas Sukabumi'),
('MENU-16100128', 'Kulit Ayam Goreng', 1, 7000, 1, 'Sate kulit ayam goreng'),
('MENU-16100370', 'Teh Pucuk', 2, 4000, 1, 'Teh pucuk'),
('MENU-16100746', 'Bakwan Jagung', 1, 5000, 1, 'Gorengan bakwan jagung'),
('MENU-16100918', 'Pepes Ikan Mas', 1, 20000, 1, 'Pepes Ikan Mas dengan Bumbu Khas Sukabumi'),
('MENU-16100924', 'Paru Goreng', 1, 15000, 1, 'Paru sapi goreng dengan bumbu asam manis'),
('MENU-16101150', 'Tumis Kangkung', 1, 10000, 1, 'Tumis kangkung '),
('MENU-16101236', 'Ikan Lele Goreng', 1, 12000, 1, 'Ikan lele goreng dengan bumbu khas Sukabumi'),
('MENU-16101362', 'Ikan Gurame Goreng', 1, 25000, 1, 'Ikan Goreng Gurame Khas Sukabumi'),
('MENU-16101532', 'Ikan Mas Goreng', 1, 15000, 1, 'Ikan mas goreng dengan bumbu khas Sukabumi'),
('MENU-16101742', 'Tahu Goreng', 1, 2000, 1, 'Tahu goreng'),
('MENU-16102358', '<NAME>', 1, 8000, 1, 'Pepes Jamur khas Sukabumi'),
('MENU-16102648', 'Perkedel Kentang', 1, 5000, 1, 'Perkedel jagung terbaik'),
('MENU-16102722', 'Gepuk Empal', 1, 15000, 1, 'Daging Sapi dengan olahan gepuk'),
('MENU-16103364', 'Ikan B<NAME>oreng', 1, 15000, 1, 'Ikan Bawal goreng khas Sukabumi'),
('MENU-16103454', 'Pepes Ikan Peda', 1, 12000, 1, 'Pepes Ikan Peda dengan bumbu khas Sukabumi'),
('MENU-16103634', 'Ikan Nila Goreng', 1, 15000, 1, 'Ikan nila goreng dengan bumbu khas Sukabumi'),
('MENU-16103668', 'Air Mineral', 2, 5000, 1, 'Aqua, Fit / Oasis'),
('MENU-16104330', 'Ati Ampela Goreng', 1, 7000, 1, 'Sate ati ampela yang digoreng'),
('MENU-16104338', 'Ikan Peda Goreng', 1, 10000, 1, 'Ikan peda digoreng dengan bumbu khas Sukabumi'),
('MENU-16104560', '<NAME>', 1, 5000, 1, 'Pepes tahu khas Sukabumi'),
('MENU-16104740', 'Ikan Sepat Goreng', 1, 7000, 1, 'Ikan Sepat asin yang digoreng'),
('MENU-16104944', 'Tempe Goreng', 1, 2000, 1, 'Tempe goreng'),
('MENU-16105126', 'Udang Goreng', 1, 10000, 1, 'Sate udang dengan 3 udang/tusuk'),
('MENU-16105666', 'Teh Tawar', 2, 3000, 1, 'Teh Tawar'),
('MENU-16105720', '<NAME>', 1, 18000, 1, 'Ayam <NAME>'),
('MENU-16105852', 'Pepes Ikan Kembung', 1, 15000, 1, 'Ikan kembung yang dipepes dengan bumbu khas sukabumi'),
('MENU-17103774', 'Sample A', 1, 10000, 1, 'Test'),
('MENU-2609194', 'Sayur Asem', 1, 7000, 1, 'Sayur Asem Enak'),
('MENU-27062676', 'Contoh 1', 1, 15000, 1, 'Test');
-- --------------------------------------------------------
--
-- Table structure for table `mt_payment_methode`
-- (accepted payment methods)
--
CREATE TABLE `mt_payment_methode` (
`id_payment` int(11) NOT NULL,
`nama_payment` varchar(20) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `mt_payment_methode`
--
INSERT INTO `mt_payment_methode` (`id_payment`, `nama_payment`) VALUES
(1, 'Cash'),
(2, 'Go-Pay'),
(3, 'Ovo'),
(5, 'Debit BCA'),
(6, 'Link aja!');
-- --------------------------------------------------------
--
-- Table structure for table `mt_staff`
-- (staff master data; status_staff presumably 1 = active -- TODO confirm)
--
CREATE TABLE `mt_staff` (
`id_staff` int(11) NOT NULL,
`nama_staff` varchar(20) NOT NULL,
`jns_klmn_staff` char(10) NOT NULL,
`no_telp_staff` varchar(20) NOT NULL,
`alamat_staff` text NOT NULL,
`jabatan_staff` varchar(20) NOT NULL,
`status_staff` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `mt_staff`
--
-- NOTE(review): 'Perempuam' looks like a typo for 'Perempuan' (female);
-- left untouched because this is dumped application data.
INSERT INTO `mt_staff` (`id_staff`, `nama_staff`, `jns_klmn_staff`, `no_telp_staff`, `alamat_staff`, `jabatan_staff`, `status_staff`) VALUES
(4, '<NAME>', 'Perempuam', '-', 'Tangerang Selatan', 'Kepala Koki', 1),
(11, '<NAME>', 'Perempuam', '085959165499', 'Villa Mutiara Serpong D3/26', 'Kepala Toko', 1),
(12, '<NAME>', 'Perempuam', '085780887394', 'Gg. Hj. Joan RT.004/002 Tangerang Selatan', 'Asisten Koki', 1),
(13, '<NAME>', 'Perempuam', '085676789876', 'Cisauk', 'Pelayan', 1),
(14, 'Habibah', 'Perempuam', '089698090023', 'Tangerang Selatan', 'Kasir', 1);
-- --------------------------------------------------------
--
-- Table structure for table `ts_order`
-- (order headers; diskon is a percentage, total_bayar the amount paid)
--
CREATE TABLE `ts_order` (
`id_invoice` char(15) NOT NULL,
`nama_customer` varchar(20) NOT NULL,
`diskon` int(11) NOT NULL,
`total_bayar` int(9) NOT NULL,
`id_user` int(11) NOT NULL,
`ts_order_date` date NOT NULL,
`ts_order_time` time NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `ts_order`
--
INSERT INTO `ts_order` (`id_invoice`, `nama_customer`, `diskon`, `total_bayar`, `id_user`, `ts_order_date`, `ts_order_time`) VALUES
('DSBY-0907-6', 'Ipan', 0, 10000, 18, '2020-07-09', '21:36:36'),
('DSBY-0910-1', 'Rafi', 10, 90900, 2, '2019-10-09', '10:07:06'),
('DSBY-1107-7', 'Test', 0, 70000, 18, '2020-07-11', '09:33:48'),
('DSBY-1107-8', 'Test 3', 0, 20000, 18, '2020-07-11', '09:44:56'),
('DSBY-1610-2', 'Jamal', 0, 150000, 18, '2019-10-16', '22:36:20'),
('DSBY-1702-4', '<NAME>', 10, 40500, 19, '2020-02-18', '06:03:49'),
('DSBY-1710-3', 'Maulana', 10, 32400, 18, '2019-10-17', '15:38:55'),
('DSBY-2906-5', '<NAME>', 0, 14000, 18, '2020-06-29', '14:19:51');
-- --------------------------------------------------------
--
-- Table structure for table `ts_order_detail`
-- (order line items; one row per item per invoice)
--
-- NOTE(review): this table gets no primary key or index in the section
-- below, and no FK to ts_order/mt_items; some id_item values in the data
-- (e.g. 'MENU-0410446', 'MENU-0510358') have no matching mt_items row --
-- the load only succeeds because no FK is enforced. Also id_payment is
-- stored per line rather than per order -- confirm with the application.
CREATE TABLE `ts_order_detail` (
`id_invoice` char(15) NOT NULL,
`id_item` char(15) NOT NULL,
`jumlah_order` int(3) NOT NULL,
`id_payment` int(11) NOT NULL,
`subtotal` int(7) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `ts_order_detail`
--
INSERT INTO `ts_order_detail` (`id_invoice`, `id_item`, `jumlah_order`, `id_payment`, `subtotal`) VALUES
('DSBY-0910-1', 'MENU-08100914', 3, 1, 21000),
('DSBY-0910-1', 'MENU-0410446', 4, 1, 20000),
('DSBY-0910-1', 'MENU-1308401', 4, 1, 60000),
('DSBY-1610-2', 'MENU-0410446', 3, 5, 15000),
('DSBY-1610-2', 'MENU-16100056', 3, 5, 60000),
('DSBY-1610-2', 'MENU-08100916', 3, 5, 15000),
('DSBY-1610-2', 'MENU-1308122', 2, 5, 60000),
('DSBY-1710-3', 'MENU-0510358', 3, 1, 21000),
('DSBY-1710-3', 'MENU-0410446', 3, 1, 15000),
('DSBY-1702-4', 'MENU-1308401', 3, 1, 45000),
('DSBY-2906-5', 'MENU-08100914', 2, 1, 14000),
('DSBY-0907-6', 'MENU-16100746', 2, 2, 10000),
('DSBY-1107-7', 'MENU-11075972', 2, 1, 40000),
('DSBY-1107-7', 'MENU-16103364', 2, 1, 30000),
('DSBY-1107-8', 'MENU-16104338', 2, 1, 20000);
-- --------------------------------------------------------
--
-- Table structure for table `users`
-- (application logins, linked to mt_staff; level_user is presumably
-- 1 = admin, 2 = regular user -- TODO confirm)
--
CREATE TABLE `users` (
`id_user` int(11) NOT NULL,
`id_staff` int(11) NOT NULL,
`username` varchar(20) NOT NULL,
`password` varchar(255) NOT NULL,
`foto_user` varchar(100) DEFAULT NULL,
`level_user` int(1) NOT NULL,
`last_login` datetime NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `users`
--
-- NOTE(review): '0000-00-00 00:00:00' requires NO_ZERO_DATE to be off;
-- user 'nia' has what looks like an unsalted 32-hex (MD5-style) hash --
-- flag for a stronger hashing scheme.
INSERT INTO `users` (`id_user`, `id_staff`, `username`, `password`, `foto_user`, `level_user`, `last_login`) VALUES
(2, 1, 'administrator', '<PASSWORD>', 'tNAB87RTSx.png', 1, '2019-10-16 22:03:56'),
(16, 9, '<EMAIL>', '<PASSWORD>', 'BaC6jlALSb.png', 2, '0000-00-00 00:00:00'),
(18, 11, 'superadmin', '<PASSWORD>', 'kXdhL8D2SY.png', 1, '2020-07-18 09:05:32'),
(19, 14, 'admin', '<PASSWORD>', 'avatar_default.png', 2, '2020-02-18 08:26:10'),
(20, 12, 'nia', '81dc9bdb52d04dc20036dbd8313ed055', 'HXdZA7nOEl.png', 1, '0000-00-00 00:00:00');
--
-- Indexes for dumped tables
--
-- NOTE(review): ts_order_detail receives no index at all here -- every
-- lookup by invoice will scan; consider a PK on (id_invoice, id_item).
--
-- Indexes for table `mt_category`
--
ALTER TABLE `mt_category`
ADD PRIMARY KEY (`id_kategori`);
--
-- Indexes for table `mt_items`
--
ALTER TABLE `mt_items`
ADD PRIMARY KEY (`id_item`),
ADD KEY `id_kategori` (`id_kategori`);
--
-- Indexes for table `mt_payment_methode`
--
ALTER TABLE `mt_payment_methode`
ADD PRIMARY KEY (`id_payment`);
--
-- Indexes for table `mt_staff`
--
ALTER TABLE `mt_staff`
ADD PRIMARY KEY (`id_staff`);
--
-- Indexes for table `ts_order`
--
ALTER TABLE `ts_order`
ADD PRIMARY KEY (`id_invoice`);
--
-- Indexes for table `users`
--
ALTER TABLE `users`
ADD PRIMARY KEY (`id_user`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `mt_category`
--
ALTER TABLE `mt_category`
MODIFY `id_kategori` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=9;
--
-- AUTO_INCREMENT for table `mt_payment_methode`
--
ALTER TABLE `mt_payment_methode`
MODIFY `id_payment` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=7;
--
-- AUTO_INCREMENT for table `mt_staff`
--
ALTER TABLE `mt_staff`
MODIFY `id_staff` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=15;
--
-- AUTO_INCREMENT for table `users`
--
ALTER TABLE `users`
MODIFY `id_user` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=21;
-- commit the transaction opened in the dump preamble, then restore the
-- saved client charset/collation
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
|
<filename>piiyel19/db.sql
-- OAuth2 server storage schema (matches the table layout used by common
-- PHP OAuth2 server libraries -- confirm against the library in use).
-- Registered client applications.
CREATE TABLE oauth_clients (
client_id VARCHAR(80) NOT NULL,
client_secret VARCHAR(80),
redirect_uri VARCHAR(2000),
grant_types VARCHAR(80),
scope VARCHAR(4000),
user_id VARCHAR(80),
PRIMARY KEY (client_id)
);
-- Issued bearer access tokens.
CREATE TABLE oauth_access_tokens (
access_token VARCHAR(40) NOT NULL,
client_id VARCHAR(80) NOT NULL,
user_id VARCHAR(80),
expires TIMESTAMP NOT NULL,
scope VARCHAR(4000),
PRIMARY KEY (access_token)
);
-- Short-lived codes for the authorization-code grant.
CREATE TABLE oauth_authorization_codes (
authorization_code VARCHAR(40) NOT NULL,
client_id VARCHAR(80) NOT NULL,
user_id VARCHAR(80),
redirect_uri VARCHAR(2000),
expires TIMESTAMP NOT NULL,
scope VARCHAR(4000),
id_token VARCHAR(1000),
PRIMARY KEY (authorization_code)
);
-- Refresh tokens for renewing access tokens.
CREATE TABLE oauth_refresh_tokens (
refresh_token VARCHAR(40) NOT NULL,
client_id VARCHAR(80) NOT NULL,
user_id VARCHAR(80),
expires TIMESTAMP NOT NULL,
scope VARCHAR(4000),
PRIMARY KEY (refresh_token)
);
-- Resource-owner accounts.
-- NOTE(review): password is VARCHAR(80) -- wide enough for bcrypt (60
-- chars); make sure the application stores hashes here, not plaintext.
CREATE TABLE oauth_users (
username VARCHAR(80),
password VARCHAR(80),
first_name VARCHAR(80),
last_name VARCHAR(80),
email VARCHAR(80),
email_verified BOOLEAN,
scope VARCHAR(4000),
PRIMARY KEY (username)
);
-- Available scopes; is_default marks scopes granted when none requested.
CREATE TABLE oauth_scopes (
scope VARCHAR(80) NOT NULL,
is_default BOOLEAN,
PRIMARY KEY (scope)
);
-- Public keys for the JWT-bearer grant, one row per client (and optional
-- subject).
-- fix: removed a stray '|' character that was fused onto the closing
-- ');' and made the statement unparseable.
CREATE TABLE oauth_jwt (
client_id VARCHAR(80) NOT NULL,
subject VARCHAR(80),
public_key VARCHAR(2000) NOT NULL
);
-- Financial agreement: ties a person and a responsible person to a
-- product/currency with monetary values and validity dates.
CREATE TABLE [dbo].[financial_agreement] (
    [id]                INT             IDENTITY (1, 1) NOT NULL,
    [responsible_id]    INT             NOT NULL,
    [person_id]         INT             NOT NULL,
    [currency_id]       INT             NOT NULL,  -- NOTE(review): no FK declared -- confirm target table
    [full_value]        DECIMAL (12, 2) NOT NULL,
    [remanescent_value] DECIMAL (12, 2) DEFAULT ((0)) NOT NULL,
    [product_id]        INT             NOT NULL,  -- NOTE(review): no FK declared -- confirm target table
    [created_on]        DATETIME        DEFAULT (getUTCdate()) NOT NULL,  -- stored in UTC
    [archived]          BIT             DEFAULT ((0)) NOT NULL,
    [branch_product_id] INT             NULL,
    [end_date]          DATETIME        NOT NULL,
    [cover_until]       DATETIME        NOT NULL,
    PRIMARY KEY CLUSTERED ([id] ASC),
    CONSTRAINT [fk_financial_agreement_person] FOREIGN KEY ([person_id]) REFERENCES [dbo].[person] ([id]),
    -- fix: constraint name was misspelled "fk_financial_agreenment_responsible";
    -- renamed to match the fk_financial_agreement_* convention used above
    CONSTRAINT [fk_financial_agreement_responsible] FOREIGN KEY ([responsible_id]) REFERENCES [dbo].[person] ([id])
);
|
/*
Deployment script for ObservationsSACTN
This code was generated by a tool.
Changes to this file may cause incorrect behavior and will be lost if
the code is regenerated.
*/
-- SSDT-generated SQLCMD deployment script: adds ImportBatch.DurationInSecs,
-- creates four covering indexes, and refreshes dependent view metadata.
-- Must be run in SQLCMD mode (the :setvar / :on error directives below).
GO
SET ANSI_NULLS, ANSI_PADDING, ANSI_WARNINGS, ARITHABORT, CONCAT_NULL_YIELDS_NULL, QUOTED_IDENTIFIER ON;
SET NUMERIC_ROUNDABORT OFF;
GO
:setvar DatabaseName "ObservationsSACTN"
:setvar DefaultFilePrefix "ObservationsSACTN"
:setvar DefaultDataPath "D:\Program Files\Microsoft SQL Server\MSSQL14.SAEON\MSSQL\DATA\"
:setvar DefaultLogPath "D:\Program Files\Microsoft SQL Server\MSSQL14.SAEON\MSSQL\DATA\"
GO
:on error exit
GO
/*
Detect SQLCMD mode and disable script execution if SQLCMD mode is not supported.
To re-enable the script after enabling SQLCMD mode, execute the following:
SET NOEXEC OFF;
*/
:setvar __IsSqlCmdEnabled "True"
GO
IF N'$(__IsSqlCmdEnabled)' NOT LIKE N'True'
BEGIN
PRINT N'SQLCMD mode must be enabled to successfully execute this script.';
SET NOEXEC ON;
END
GO
USE [$(DatabaseName)];
GO
IF EXISTS (SELECT 1
FROM [master].[dbo].[sysdatabases]
WHERE [name] = N'$(DatabaseName)')
BEGIN
ALTER DATABASE [$(DatabaseName)]
SET TEMPORAL_HISTORY_RETENTION ON
WITH ROLLBACK IMMEDIATE;
END
GO
PRINT N'Altering [dbo].[ImportBatch]...';
GO
-- new nullable column; existing rows keep NULL until backfilled
ALTER TABLE [dbo].[ImportBatch]
ADD [DurationInSecs] INT NULL;
GO
PRINT N'Creating [dbo].[ImportBatch].[IX_ImportBatch_DurationInSecs]...';
GO
CREATE NONCLUSTERED INDEX [IX_ImportBatch_DurationInSecs]
ON [dbo].[ImportBatch]([DurationInSecs] ASC);
GO
PRINT N'Creating [dbo].[ImportBatchSummary].[IX_ImportBatchSummary_Count]...';
GO
CREATE NONCLUSTERED INDEX [IX_ImportBatchSummary_Count]
ON [dbo].[ImportBatchSummary]([Count] ASC);
GO
PRINT N'Creating [dbo].[ImportBatchSummary].[IX_ImportBatchSummary_EndDate]...';
GO
CREATE NONCLUSTERED INDEX [IX_ImportBatchSummary_EndDate]
ON [dbo].[ImportBatchSummary]([EndDate] ASC);
GO
PRINT N'Creating [dbo].[ImportBatchSummary].[IX_ImportBatchSummary_StartDate]...';
GO
CREATE NONCLUSTERED INDEX [IX_ImportBatchSummary_StartDate]
ON [dbo].[ImportBatchSummary]([StartDate] ASC);
GO
-- refresh view metadata so the views pick up the schema change; the
-- SET toggles reproduce each view's original creation-time settings
PRINT N'Refreshing [dbo].[vImportBatch]...';
GO
EXECUTE sp_refreshsqlmodule N'[dbo].[vImportBatch]';
GO
PRINT N'Refreshing [dbo].[vObservationExpansion]...';
GO
SET ANSI_NULLS, QUOTED_IDENTIFIER OFF;
GO
EXECUTE sp_refreshsqlmodule N'[dbo].[vObservationExpansion]';
GO
SET ANSI_NULLS, QUOTED_IDENTIFIER ON;
GO
PRINT N'Refreshing [dbo].[vObservation]...';
GO
SET ANSI_NULLS, QUOTED_IDENTIFIER OFF;
GO
EXECUTE sp_refreshsqlmodule N'[dbo].[vObservation]';
GO
SET ANSI_NULLS, QUOTED_IDENTIFIER ON;
GO
PRINT N'Refreshing [dbo].[vObservationJSON]...';
GO
SET ANSI_NULLS, QUOTED_IDENTIFIER OFF;
GO
EXECUTE sp_refreshsqlmodule N'[dbo].[vObservationJSON]';
GO
SET ANSI_NULLS, QUOTED_IDENTIFIER ON;
GO
PRINT N'Update complete.';
GO
|
<filename>kurulum/zwork-personel-tablosu.sql
-- phpMyAdmin SQL Dump
-- version 4.0.8
-- http://www.phpmyadmin.net
--
-- Host: localhost
-- Generation time: 06 Mar 2019, 09:36:54
-- Server version: 5.6.27-log
-- PHP version: 5.3.26
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET time_zone = "+00:00";
-- save the client charset/collation; restored at the end of the dump
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8 */;
--
-- Database: `zwork`
--
-- --------------------------------------------------------
--
-- Table structure for table `zw_personel`
-- (staff logins: oturumID = session id, kullanici = username,
-- sifre = password, tarihG = last-login timestamp)
--
CREATE TABLE IF NOT EXISTS `zw_personel` (
`ID` int(11) NOT NULL AUTO_INCREMENT,
`admin` tinyint(1) DEFAULT '0',
`oturumID` varchar(32) COLLATE utf8_unicode_ci DEFAULT NULL,
`kullanici` varchar(32) COLLATE utf8_unicode_ci DEFAULT NULL,
`sifre` varchar(32) COLLATE utf8_unicode_ci DEFAULT NULL,
`eposta` varchar(50) COLLATE utf8_unicode_ci DEFAULT NULL,
`tarihG` datetime DEFAULT NULL,
PRIMARY KEY (`ID`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci AUTO_INCREMENT=2 ;
--
-- Dumping data for table `zw_personel`
--
-- NOTE(review): `sifre` is a 32-hex value, i.e. an unsalted MD5-style
-- hash -- flag for a stronger hashing scheme.
INSERT INTO `zw_personel` (`ID`, `admin`, `oturumID`, `kullanici`, `sifre`, `eposta`, `tarihG`) VALUES
(1, 1, '141e8e041ebd87e5481af791c4a0f815', 'zwork', '4297f44b13955235245b2497399d7a93', '<EMAIL>', '2017-06-02 18:55:43');
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
|
<gh_stars>0
--
-- Table structure for table `products`
--
-- Product catalog; `type`/`attribute` hold a per-type property as text
-- (presumably e.g. size/weight/dimensions — verify against the application).
DROP TABLE IF EXISTS `products`;
CREATE TABLE `products` (
`id` int(10) unsigned NOT NULL AUTO_INCREMENT,
`sku` varchar(255) COLLATE utf8mb4_general_ci NOT NULL,
`name` varchar(255) COLLATE utf8mb4_general_ci NOT NULL,
-- decimal(8,2): exact type — correct choice for money (max 999999.99).
`price` decimal(8, 2) NOT NULL,
`type` varchar(255) COLLATE utf8mb4_general_ci NOT NULL,
`attribute` varchar(255) COLLATE utf8mb4_general_ci NOT NULL,
-- NOTE(review): `sku` looks like a natural key; a UNIQUE KEY on it would
-- enforce that — confirm with the application before adding.
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
|
<reponame>Rizky-190180085/restful-server-perpus
-- phpMyAdmin SQL Dump
-- version 4.7.0
-- https://www.phpmyadmin.net/
--
-- Host: 127.0.0.1
-- Generation Time: Dec 17, 2021 at 04:51 PM
-- Server version: 10.1.25-MariaDB
-- PHP Version: 7.1.7
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
-- The whole dump runs inside one explicit transaction; the matching COMMIT
-- is emitted after the AUTO_INCREMENT fix-ups at the end of this file.
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `db_perpus_rest`
--
-- --------------------------------------------------------
--
-- Table structure for table `keys`
--
-- API keys for the REST server (CodeIgniter REST_Controller schema).
CREATE TABLE `keys` (
`id` int(11) NOT NULL,
`user_id` int(11) NOT NULL,
-- SECURITY NOTE(review): keys are stored in plaintext; acceptable only if
-- they are treated as public identifiers — confirm.
`key` varchar(40) NOT NULL,
`level` int(2) NOT NULL,
-- Flags: 1 = bypass rate limits / key restricted to one user.
`ignore_limits` tinyint(1) NOT NULL DEFAULT '0',
`is_private_key` tinyint(1) NOT NULL DEFAULT '0',
`ip_addresses` text,
-- Unix epoch seconds (int, not a DATETIME).
`date_created` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Dumping data for table `keys`
--
INSERT INTO `keys` (`id`, `user_id`, `key`, `level`, `ignore_limits`, `is_private_key`, `ip_addresses`, `date_created`) VALUES
(1, 1, '1234', 1, 0, 0, NULL, 1),
(2, 1, 'buku123', 1, 0, 0, NULL, 0);
-- --------------------------------------------------------
--
-- Table structure for table `limits`
--
-- Per-key, per-endpoint rate-limit counters (reset hourly).
CREATE TABLE `limits` (
`id` int(11) NOT NULL,
-- Format: 'uri:<controller>/<method>:<http_verb>' (see seed rows below).
`uri` varchar(255) NOT NULL,
`count` int(10) NOT NULL,
-- Unix epoch second when the current counting hour began.
`hour_started` int(11) NOT NULL,
`api_key` varchar(40) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Dumping data for table `limits`
--
INSERT INTO `limits` (`id`, `uri`, `count`, `hour_started`, `api_key`) VALUES
(6, 'uri:buku/index:get', 5, 1639755231, '1234'),
(7, 'uri:buku/add:post', 3, 1639755132, '1234'),
(8, 'uri:buku/delete:delete', 1, 1639755256, '1234'),
(9, 'uri:buku/update:put', 3, 1639755288, '1234'),
(10, 'uri:buku/index:get', 2, 1639755695, 'buku123');
-- --------------------------------------------------------
--
-- Table structure for table `tb_buku`
--
-- Library catalog: book id, title, author, publisher, publication year.
CREATE TABLE `tb_buku` (
`id_buku` int(10) NOT NULL,
-- NOTE(review): varchar(50) truncates long titles — the seed data below is
-- already cut off mid-word; consider widening if that is not intentional.
`judul_buku` varchar(50) NOT NULL,
`penulis` varchar(30) NOT NULL,
`penerbit` varchar(50) NOT NULL,
`tahun_terbit` varchar(30) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `tb_buku`
--
INSERT INTO `tb_buku` (`id_buku`, `judul_buku`, `penulis`, `penerbit`, `tahun_terbit`) VALUES
(11009, 'The Chronicles of Narnia : The Lion, the Witch and', '<NAME>', 'Garmedia', '1950'),
(11010, 'The Chronicles of Narnia : Prince Caspian: The Ret', '<NAME>', 'Garmedia', '1951'),
(11011, 'The Lord of The Rings : The Return of the King', '<NAME>', 'Garmedia', '1954');
--
-- Indexes for dumped tables
--
--
-- Indexes for table `keys`
--
ALTER TABLE `keys`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `limits`
--
ALTER TABLE `limits`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `tb_buku`
--
ALTER TABLE `tb_buku`
ADD PRIMARY KEY (`id_buku`);
--
-- AUTO_INCREMENT for dumped tables
--
-- AUTO_INCREMENT counters are re-applied after the data load so new rows
-- continue above the dumped ids.
--
-- AUTO_INCREMENT for table `keys`
--
ALTER TABLE `keys`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=3;
--
-- AUTO_INCREMENT for table `limits`
--
ALTER TABLE `limits`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=11;
--
-- AUTO_INCREMENT for table `tb_buku`
--
-- COMMIT closes the transaction opened by START TRANSACTION in the header.
ALTER TABLE `tb_buku`
MODIFY `id_buku` int(10) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=11012;COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
|
-- phpMyAdmin SQL Dump
-- version 5.0.1
-- https://www.phpmyadmin.net/
--
-- Host: imdbdb
-- Generation Time: Aug 05, 2021 at 11:13 PM
-- Server version: 10.6.3-MariaDB-1:10.6.3+maria~focal
-- PHP Version: 7.4.1
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
-- Entire dump runs in one transaction; COMMIT appears after the
-- AUTO_INCREMENT fix-ups near the end of this file.
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `imdbscrapper`
--
CREATE DATABASE IF NOT EXISTS `imdbscrapper` DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci;
USE `imdbscrapper`;
DELIMITER $$
--
-- Procedures
--
-- NOTE: parameter names intentionally shadow column names (e.g. `idCheck`);
-- inside a MySQL procedure unqualified names resolve to the parameter, so
-- the column side of each predicate is table-qualified below.
-- Returns a row iff `idCheck` is already on the ignore list (duplicate probe).
CREATE DEFINER=`root`@`%` PROCEDURE `checkDuplicateIgnore` (IN `idCheck` BIGINT(20)) BEGIN
SELECT ignoreList.idIgnore
FROM ignoreList
WHERE ignoreList.idIgnore = idCheck;
END$$
-- Returns a row iff the movie id already exists.
CREATE DEFINER=`root`@`%` PROCEDURE `checkDuplicateMovie` (IN `idCheck` BIGINT(20)) BEGIN
SELECT movies.idMovie
FROM movies
WHERE movies.idMovie = idCheck;
END$$
-- Returns a row iff the id is already queued for re-checking.
CREATE DEFINER=`root`@`%` PROCEDURE `checkDuplicateRecheck` (IN `idCheck` BIGINT(20)) BEGIN
SELECT recheck.idRecheck
FROM recheck
WHERE recheck.idRecheck = idCheck;
END$$
-- Returns a row iff the series id already exists.
CREATE DEFINER=`root`@`%` PROCEDURE `checkDuplicateSerie` (IN `idCheck` BIGINT(20)) BEGIN
SELECT series.idSerie
FROM series
WHERE series.idSerie = idCheck;
END$$
-- Lists movies whose name appears more than once.
-- NOTE(review): selecting idMovie while grouping by name is non-deterministic
-- (relies on ONLY_FULL_GROUP_BY being off) — it returns an arbitrary id per name.
CREATE DEFINER=`root`@`%` PROCEDURE `getDuplicateMovies` () BEGIN
SELECT movies.idMovie, movies.name
FROM movies
GROUP BY movies.name
HAVING COUNT(movies.name) > 1;
END$$
-- Lists series whose name appears more than once (same caveat as above).
CREATE DEFINER=`root`@`%` PROCEDURE `getDuplicateSeries` () BEGIN
SELECT series.idSerie, series.name
FROM series
GROUP BY series.name
HAVING COUNT(series.name) > 1;
END$$
-- Exact-name movie lookup.
CREATE DEFINER=`root`@`%` PROCEDURE `getMovieByName` (IN `movieName` VARCHAR(255)) BEGIN
SELECT movies.idMovie, movies.name, movies.description, movies.imdbURL, movies.rating, movies.ratingCount, movies.releaseDate
FROM movies
WHERE movies.name = movieName;
END$$
-- Movies filtered by minimum rating / vote count / release date,
-- best-rated first.
CREATE DEFINER=`root`@`%` PROCEDURE `getMovies` (IN `valueRating` DOUBLE, IN `valueRatingCount` BIGINT(20), IN `valueReleaseDate` DATE) BEGIN
SELECT movies.idMovie, movies.name, movies.description, movies.imdbURL, movies.rating, movies.ratingCount, movies.releaseDate
FROM movies
WHERE movies.rating >= valueRating
AND movies.ratingCount >= valueRatingCount
AND movies.releaseDate >= valueReleaseDate
ORDER by movies.rating DESC, movies.ratingCount DESC;
END$$
-- Exact-name series lookup.
CREATE DEFINER=`root`@`%` PROCEDURE `getSerieByName` (IN `serieName` VARCHAR(255)) BEGIN
SELECT series.idSerie, series.name, series.description, series.imdbURL, series.rating, series.ratingCount, series.releaseDate
FROM series
WHERE series.name = serieName;
END$$
-- Series filtered by minimum rating / vote count / release date.
CREATE DEFINER=`root`@`%` PROCEDURE `getSeries` (IN `valueRating` DOUBLE, IN `valueRatingCount` BIGINT(20), IN `valueReleaseDate` DATE) BEGIN
SELECT series.idSerie, series.name, series.description, series.imdbURL, series.rating, series.ratingCount, series.releaseDate
FROM series
WHERE series.rating >= valueRating
AND series.ratingCount >= valueRatingCount
AND series.releaseDate >= valueReleaseDate
ORDER by series.rating DESC, series.ratingCount DESC;
END$$
-- Adds an id to the ignore list (no duplicate guard; caller checks first).
CREATE DEFINER=`root`@`%` PROCEDURE `insertIgnore` (IN `inIDIgnore` BIGINT(20)) BEGIN
INSERT INTO ignoreList
(`idIgnore`)
VALUES(inIDIgnore);
END$$
-- Inserts one scraped movie row.
CREATE DEFINER=`root`@`%` PROCEDURE `insertMovie` (`idMovie` BIGINT(20), `name` VARCHAR(255), `description` LONGTEXT, `imdbURL` VARCHAR(255), `rating` DOUBLE, `ratingCount` BIGINT(20), `releaseDate` DATE) BEGIN
INSERT INTO movies
(`idmovie`, `name`, `description`, `imdbURL`, `rating`, `ratingCount`, `releaseDate`)
VALUES(idMovie, name, description, imdbURL, rating, ratingCount, releaseDate);
END$$
-- Links a movie to a genre (many-to-many junction row).
CREATE DEFINER=`root`@`%` PROCEDURE `insertMovieGenre` (IN `idMovie` BIGINT(20), IN `idGenre` VARCHAR(255)) BEGIN
INSERT INTO moviesGenre
(`idMovie`, `idGenre`)
VALUES(idMovie, idGenre);
END$$
-- Queues an id for re-checking.
CREATE DEFINER=`root`@`%` PROCEDURE `insertRecheck` (IN `inIDRecheck` BIGINT(20)) BEGIN
INSERT INTO recheck
(`idRecheck`)
VALUES(inIDRecheck);
END$$
-- Inserts one scraped series row.
CREATE DEFINER=`root`@`%` PROCEDURE `insertSerie` (`idSerie` BIGINT(20), `name` VARCHAR(255), `description` LONGTEXT, `imdbURL` VARCHAR(255), `rating` DOUBLE, `ratingCount` BIGINT(20), `releaseDate` DATE) BEGIN
INSERT INTO series
(`idserie`, `name`, `description`, `imdbURL`, `rating`, `ratingCount`, `releaseDate`)
VALUES(idSerie, name, description, imdbURL, rating, ratingCount, releaseDate);
END$$
-- Links a series to a genre.
CREATE DEFINER=`root`@`%` PROCEDURE `insertSerieGenre` (IN `idSerie` BIGINT(20), IN `idGenre` VARCHAR(255)) BEGIN
INSERT INTO seriesGenre
(`idSerie`, `idGenre`)
VALUES(idSerie, idGenre);
END$$
-- Removes a duplicate movie and its genre links, archives the pair in
-- duplicateMovies, and blocks the id from being re-scraped via ignoreList.
-- NOTE(review): the four statements are not wrapped in a transaction, so a
-- mid-procedure failure leaves partial state — confirm if that is acceptable.
CREATE DEFINER=`root`@`%` PROCEDURE `removeDuplicateMovie` (IN `inMovie` BIGINT(20), IN `inName` VARCHAR(255)) BEGIN
DELETE FROM movies
WHERE movies.idMovie = inMovie;
DELETE FROM moviesGenre
WHERE moviesGenre.idMovie = inMovie;
INSERT INTO duplicateMovies
(duplicateMovies.idMovie, duplicateMovies.nameMovie)
VALUES (inMovie, inName);
INSERT INTO ignoreList
(ignoreList.idIgnore)
VALUES (inMovie);
END$$
-- Series counterpart of removeDuplicateMovie (same transactional caveat).
CREATE DEFINER=`root`@`%` PROCEDURE `removeDuplicateSerie` (IN `inSerie` BIGINT(20), IN `inName` VARCHAR(255)) BEGIN
DELETE FROM series
WHERE series.idSerie = inSerie;
DELETE FROM seriesGenre
WHERE seriesGenre.idSerie = inSerie;
INSERT INTO duplicateSeries
(duplicateSeries.idSerie, duplicateSeries.nameSerie)
VALUES (inSerie, inName);
INSERT INTO ignoreList
(ignoreList.idIgnore)
VALUES (inSerie);
END$$
DELIMITER ;
-- --------------------------------------------------------
--
-- Table structure for table `duplicateMovies`
--
-- Archive of movie rows removed as duplicates (see removeDuplicateMovie).
CREATE TABLE `duplicateMovies` (
`idMovie` bigint(20) NOT NULL,
`nameMovie` varchar(255) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-- --------------------------------------------------------
--
-- Table structure for table `duplicateSeries`
--
-- Archive of series rows removed as duplicates.
CREATE TABLE `duplicateSeries` (
`idSerie` bigint(20) NOT NULL,
`nameSerie` varchar(255) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-- --------------------------------------------------------
--
-- Table structure for table `ignoreList`
--
-- Ids the scraper must skip.
CREATE TABLE `ignoreList` (
`idIgnore` bigint(20) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-- --------------------------------------------------------
--
-- Table structure for table `movies`
--
-- Scraped movie data; idMovie is the external (IMDb-derived) id.
CREATE TABLE `movies` (
`idMovie` bigint(20) NOT NULL,
`name` varchar(255) NOT NULL,
`description` longtext DEFAULT NULL,
`imdbURL` varchar(255) NOT NULL,
-- rating is a float score; ratingCount the number of votes.
`rating` double DEFAULT NULL,
`ratingCount` bigint(20) DEFAULT NULL,
`releaseDate` date DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-- --------------------------------------------------------
--
-- Table structure for table `moviesGenre`
--
-- Movie<->genre junction table; idGenre is the genre name as text.
CREATE TABLE `moviesGenre` (
`id` bigint(20) NOT NULL,
`idMovie` bigint(20) NOT NULL,
`idGenre` varchar(255) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-- --------------------------------------------------------
--
-- Table structure for table `recheck`
--
-- Ids queued for a later re-scrape.
CREATE TABLE `recheck` (
`idRecheck` bigint(20) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-- --------------------------------------------------------
--
-- Table structure for table `series`
--
-- Scraped series data (mirrors `movies`).
CREATE TABLE `series` (
`idSerie` bigint(20) NOT NULL,
`name` varchar(255) NOT NULL,
`description` longtext DEFAULT NULL,
`imdbURL` varchar(255) NOT NULL,
`rating` double DEFAULT NULL,
`ratingCount` bigint(20) DEFAULT NULL,
`releaseDate` date DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-- --------------------------------------------------------
--
-- Table structure for table `seriesGenre`
--
-- Series<->genre junction table.
CREATE TABLE `seriesGenre` (
`id` bigint(20) NOT NULL,
`idSerie` bigint(20) NOT NULL,
`idGenre` varchar(255) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Indexes for dumped tables
--
--
-- Indexes for table `duplicateMovies`
--
ALTER TABLE `duplicateMovies`
ADD PRIMARY KEY (`idMovie`);
--
-- Indexes for table `duplicateSeries`
--
ALTER TABLE `duplicateSeries`
ADD PRIMARY KEY (`idSerie`);
--
-- Indexes for table `ignoreList`
--
ALTER TABLE `ignoreList`
ADD PRIMARY KEY (`idIgnore`);
--
-- Indexes for table `movies`
--
ALTER TABLE `movies`
ADD PRIMARY KEY (`idMovie`);
--
-- Indexes for table `moviesGenre`
--
-- NOTE(review): lookups join on idMovie/idSerie; a secondary index on those
-- columns would help — confirm query volume before adding.
ALTER TABLE `moviesGenre`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `recheck`
--
ALTER TABLE `recheck`
ADD PRIMARY KEY (`idRecheck`);
--
-- Indexes for table `series`
--
ALTER TABLE `series`
ADD PRIMARY KEY (`idSerie`);
--
-- Indexes for table `seriesGenre`
--
ALTER TABLE `seriesGenre`
ADD PRIMARY KEY (`id`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `movies`
--
ALTER TABLE `movies`
MODIFY `idMovie` bigint(20) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `moviesGenre`
--
ALTER TABLE `moviesGenre`
MODIFY `id` bigint(20) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `series`
--
ALTER TABLE `series`
MODIFY `idSerie` bigint(20) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `seriesGenre`
--
ALTER TABLE `seriesGenre`
MODIFY `id` bigint(20) NOT NULL AUTO_INCREMENT;
-- Close the transaction opened in the dump header, then restore session vars.
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
|
<reponame>hosamn/Speech-Diseases-Clinic<filename>sqlCode.sql<gh_stars>0
-- SQLite schema: patient demographics for the speech-diseases clinic.
-- Date of birth is split into day/month/year integer columns (nullable so
-- partially-known dates can be stored).
CREATE TABLE `patient` (
	`id`	INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
	-- Patient full name; UNIQUE means two patients cannot share a name —
	-- NOTE(review): confirm this is intended, it rejects real homonyms.
	`name`	TEXT NOT NULL UNIQUE,
	`dobDay`	INTEGER,
	`dobMonth`	INTEGER,
	`dobYear`	INTEGER,
	`sex`	TEXT,
	`nationality`	TEXT,
	`marital`	TEXT,
	`job`	TEXT,
	`address`	TEXT,
	`phoneMobile`	TEXT,
	`phoneHome`	TEXT,
	`phoneWork`	TEXT,
	-- Who referred the patient to the clinic.
	`referrer`	TEXT
);
-- One examination record per row, linked to a patient.
-- The pas*G/pas*B column pairs appear to be paired assessment scores
-- (G/B presumably "good/bad" ratings per category — TODO confirm with the UI).
CREATE TABLE `test` (
	`id`	INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
	-- References patient.id; NOTE(review): no FOREIGN KEY declared, so
	-- orphan tests are possible unless the application enforces it.
	`patientID`	INTEGER NOT NULL,
	`dateDay`	INTEGER,
	`dateMonth`	INTEGER,
	`dateYear`	INTEGER,
	`hospital`	TEXT,
	`endoscope`	TEXT,
	-- Diagnosis and patient history free text.
	`diag`	TEXT,
	`hist`	TEXT,
	`pasTG`	TEXT,
	`pasTB`	TEXT,
	`pasNG`	TEXT,
	`pasNB`	TEXT,
	`pasHG`	TEXT,
	`pasHB`	TEXT,
	`pasPG`	TEXT,
	`pasPB`	TEXT,
	`pasMG`	TEXT,
	`pasMB`	TEXT,
	`pasSG`	TEXT,
	`pasSB`	TEXT,
	`residue`	TEXT,
	`others`	TEXT,
	-- Recommendations.
	`recomm`	TEXT
);
|
<filename>SistemaTaller.BackEnd.DB/Stored Procedures/SP_Reparaciones_Delete.sql<gh_stars>0
-- Soft-deletes one repair: returns the row (pre-update state, preserving the
-- original result-set contract) and then marks it inactive.
-- @NumeroReparacion: key of the repair to deactivate.
CREATE PROCEDURE SP_Reparaciones_Delete
@NumeroReparacion INT
AS
BEGIN
	SELECT * FROM Reparaciones WHERE NumeroReparacion = @NumeroReparacion
	-- BUG FIX: the original predicate was "NumeroReparacion = NumeroReparacion"
	-- (column compared to itself — always true for non-NULL keys), which set
	-- Activo = 0 on EVERY row of the table. The parameter must be referenced
	-- with its @ prefix.
	UPDATE Reparaciones SET Activo = 0 WHERE NumeroReparacion = @NumeroReparacion
END
<filename>script.sql
-- Petinder application schema (Laravel-style table layout).
CREATE DATABASE `petinder` /*!40100 DEFAULT CHARACTER SET utf8mb4 */;
-- Password-reset tokens, keyed (non-uniquely) by e-mail.
CREATE TABLE `password_resets` (
  `email` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
  `token` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
  `created_at` timestamp NULL DEFAULT NULL,
  KEY `password_resets_email_index` (`email`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- Application accounts; e-mail is the login identifier (unique).
CREATE TABLE `users` (
  `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT,
  `name` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
  `email` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
  `email_verified_at` timestamp NULL DEFAULT NULL,
  -- telefone = phone, endereco = address (Portuguese column names).
  `telefone` varchar(25) COLLATE utf8mb4_unicode_ci NOT NULL,
  `endereco` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
  `password` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
  -- Laravel "remember me" token.
  `remember_token` varchar(100) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
  `created_at` timestamp NULL DEFAULT NULL,
  `updated_at` timestamp NULL DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `users_email_unique` (`email`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- Pets available in the app; `dono_id` ("owner id") refers to users.id.
CREATE TABLE `pet` (
  `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT,
  -- FIX: original declared "NOT NULL NOT NULL" (duplicated constraint).
  `dono_id` bigint(20) unsigned NOT NULL,
  `name` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
  -- FIX: original had "COLLATE utf8mb4_unicode_ci" on this bigint column;
  -- MySQL rejects COLLATE on non-string types (error 1253), so the CREATE
  -- TABLE failed outright. idade = age.
  `idade` bigint(20) NOT NULL,
  -- raca = breed, especie = species.
  `raca` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
  `especie` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
  `description` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
  -- NOTE(review): consider FOREIGN KEY (`dono_id`) REFERENCES `users` (`id`)
  -- to prevent orphan pets — confirm the app expects DB-level enforcement.
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- Direct messages between two users (sender_id / receiver_id -> users.id).
CREATE TABLE `chat` (
  `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT,
  `msg` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
  -- FIX: original attached "COLLATE utf8mb4_unicode_ci" to these bigint
  -- columns; COLLATE is only valid on string types, so MySQL rejected the
  -- statement. Removed the invalid clauses.
  `sender_id` bigint(20) NOT NULL,
  `receiver_id` bigint(20) NOT NULL,
  `created_at` timestamp NULL DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
<gh_stars>1-10
-- Oracle SQL*Plus script: disable &substitution so literal ampersands in any
-- following statements are not prompted for.
SET DEFINE OFF;
-- Unique composite index enforcing one notification scenario per
-- application-structure reference.
CREATE UNIQUE INDEX AFW_12_LIEN_SA_SCENR_NOTFC_UK1 ON AFW_12_LIEN_SA_SCENR_NOTFC
(REF_STRUC_APLIC, REF_SCENR_NOTFC)
LOGGING
/
|
<reponame>xandout/hdbcli
-- SAP HANA CLI session: select the working schema, insert a test row.
SET SCHEMA MITCHELL;
-- NOTE(review): INSERT without a column list — breaks silently if TESTING
-- gains columns; prefer INSERT INTO TESTING (col) VALUES (...).
INSERT INTO TESTING VALUES ('moose2');
/exit
|
<reponame>WeilerWebServices/PostgreSQL<filename>pgAdmin/tools/sqleditor/templates/sqleditor/sql/default/objectname.sql
{# ============= Fetch the schema and object name for given object id ============= #}
{# LEFT JOIN keeps objects whose namespace row is missing (nspname -> NULL). #}
{# obj_id is rendered by the template engine server-side, not bound as a SQL
   parameter; it must never contain untrusted input. #}
{% if obj_id %}
SELECT n.nspname, r.relname
FROM pg_class r
LEFT JOIN pg_namespace n ON (r.relnamespace = n.oid)
WHERE r.oid = {{obj_id}};
{% endif %}
|
-- T-SQL batch header: select the ANTERO database and set the ANSI options
-- required for creating/altering modules that use indexed views or computed
-- columns.
USE [ANTERO]
GO
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
ALTER PROCEDURE [dw].[p_lataa_d_koulutusluokitus]
AS
if not exists (select * from dw.d_koulutusluokitus where id=-1) begin
set identity_insert dw.d_koulutusluokitus on;
insert into dw.d_koulutusluokitus (
id,
koulutusluokitus_avain,
koulutusluokitus_koodi,koulutusluokitus_fi,koulutusluokitus_sv,koulutusluokitus_en,
koulutusastetaso1_koodi,koulutusastetaso1_fi,koulutusastetaso1_sv,koulutusastetaso1_en,
koulutusastetaso2_koodi,koulutusastetaso2_fi,koulutusastetaso2_sv,koulutusastetaso2_en,
koulutusalataso1_koodi,koulutusalataso1_fi,koulutusalataso1_sv,koulutusalataso1_en,
koulutusalataso2_koodi,koulutusalataso2_fi,koulutusalataso2_sv,koulutusalataso2_en,
koulutusalataso3_koodi,koulutusalataso3_fi,koulutusalataso3_sv,koulutusalataso3_en,
okmohjauksenala_koodi,okmohjauksenala_fi,okmohjauksenala_sv,okmohjauksenala_en,
koulutusaste2002_koodi,koulutusaste2002_fi,koulutusaste2002_sv,koulutusaste2002_en,
koulutusala2002_koodi,koulutusala2002_fi,koulutusala2002_sv,koulutusala2002_en,
opintoala2002_koodi,opintoala2002_fi,opintoala2002_sv,opintoala2002_en,
opintoala1995_koodi,opintoala1995_fi,opintoala1995_sv,opintoala1995_en,
koulutussektori_koodi, koulutussektori_fi, koulutussektori_sv, koulutussektori_en,
tutkintotyyppi_koodi, tutkintotyyppi_fi, tutkintotyyppi_sv, tutkintotyyppi_en,
source
)
select
-1,
koodi,
koodi,nimi,nimi_sv,nimi_en, --koulutus
koodi,nimi,nimi_sv,nimi_en, --astetaso1
koodi,nimi,nimi_sv,nimi_en, --astetaso2
koodi,nimi,nimi_sv,nimi_en, --alataso1
koodi,nimi,nimi_sv,nimi_en, --alataso2
koodi,nimi,nimi_sv,nimi_en, --alataso3
koodi,nimi,nimi_sv,nimi_en, --ohjauksenala
koodi,nimi,nimi_sv,nimi_en, --koulutusaste2002
koodi,nimi,nimi_sv,nimi_en, --koulutusala2002
koodi,nimi,nimi_sv,nimi_en, --opintoala2002
koodi,nimi,nimi_sv,nimi_en, --opintoala1995
koodi,nimi,nimi_sv,nimi_en, --koulutussektori
koodi,nimi,nimi_sv,nimi_en, --tutkintotyyppi
source
from sa.sa_koodistot
where koodisto='vipunenmeta'
and koodi='-1'
;
set identity_insert dw.d_koulutusluokitus off;
end else begin
-- Refresh the "unknown" placeholder row (koodi = '-1') of the education
-- classification dimension from the vipunenmeta codeset. Every level of the
-- hierarchy (aste/ala/okm/2002/1995/sektori/tutkintotyyppi) is overwritten
-- with the same placeholder code and its fi/sv/en names.
update d
set
koulutusluokitus_avain=s.koodi,
koulutusluokitus_koodi=s.koodi,koulutusluokitus_fi=s.nimi,koulutusluokitus_sv=s.nimi_sv,koulutusluokitus_en=s.nimi_en,
koulutusastetaso1_koodi=s.koodi,koulutusastetaso1_fi=s.nimi,koulutusastetaso1_sv=s.nimi_sv,koulutusastetaso1_en=s.nimi_en,
koulutusastetaso2_koodi=s.koodi,koulutusastetaso2_fi=s.nimi,koulutusastetaso2_sv=s.nimi_sv,koulutusastetaso2_en=s.nimi_en,
koulutusalataso1_koodi=s.koodi,koulutusalataso1_fi=s.nimi,koulutusalataso1_sv=s.nimi_sv,koulutusalataso1_en=s.nimi_en,
koulutusalataso2_koodi=s.koodi,koulutusalataso2_fi=s.nimi,koulutusalataso2_sv=s.nimi_sv,koulutusalataso2_en=s.nimi_en,
koulutusalataso3_koodi=s.koodi,koulutusalataso3_fi=s.nimi,koulutusalataso3_sv=s.nimi_sv,koulutusalataso3_en=s.nimi_en,
okmohjauksenala_koodi=s.koodi,okmohjauksenala_fi=s.nimi,okmohjauksenala_sv=s.nimi_sv,okmohjauksenala_en=s.nimi_en,
koulutusaste2002_koodi=s.koodi,koulutusaste2002_fi=s.nimi,koulutusaste2002_sv=s.nimi_sv,koulutusaste2002_en=s.nimi_en,
koulutusala2002_koodi=s.koodi,koulutusala2002_fi=s.nimi,koulutusala2002_sv=s.nimi_sv,koulutusala2002_en=s.nimi_en,
opintoala2002_koodi=s.koodi,opintoala2002_fi=s.nimi,opintoala2002_sv=s.nimi_sv,opintoala2002_en=s.nimi_en,
opintoala1995_koodi=s.koodi,opintoala1995_fi=s.nimi,opintoala1995_sv=s.nimi_sv,opintoala1995_en=s.nimi_en,
koulutussektori_koodi=s.koodi,koulutussektori_fi=s.nimi,koulutussektori_sv=s.nimi_sv,koulutussektori_en=s.nimi_en,
tutkintotyyppi_koodi=s.koodi,tutkintotyyppi_fi=s.nimi,tutkintotyyppi_sv=s.nimi_sv,tutkintotyyppi_en=s.nimi_en,
-- mark the row as maintained by this ETL procedure
source='ETL: p_lataa_d_koulutusluokitus'
-- T-SQL UPDATE ... FROM: d is the target alias, joined to the staging codeset
from dw.d_koulutusluokitus d
join sa.sa_koodistot s on s.koodi=d.koulutusluokitus_koodi
where s.koodisto='vipunenmeta'
and s.koodi='-1'
;
end
-- Upsert education-classification rows from staging (sa.sa_koulutusluokitus)
-- into the dimension dw.d_koulutusluokitus, keyed on koulutusluokitus_avain
-- ('koulutus_' + koodi). The CROSS JOIN to sa.sa_koodistot row t
-- (koodisto='vipunenmeta', koodi='-1') supplies the "unknown" placeholder
-- used as the final fallback in every COALESCE, so no code/name column is
-- ever left NULL. koulutussektori is derived from tutkintotyyppikoodi.
MERGE dw.d_koulutusluokitus AS target
USING (
SELECT DISTINCT
'koulutus_'+s.koodi AS avain,
s.koodi,
COALESCE(s.nimi, s.nimi_sv, s.nimi_en) AS nimi,
COALESCE(s.nimi_sv, s.nimi, s.nimi_en) AS nimi_sv,
COALESCE(s.nimi_en, s.nimi, s.nimi_sv) AS nimi_en,
COALESCE(koulutusluokitus2016koulutusastetaso1koodi,t.koodi) as koulutusluokitus2016koulutusastetaso1koodi,
LTRIM(RTRIM( COALESCE(koulutusluokitus2016koulutusastetaso1nimi, koulutusluokitus2016koulutusastetaso1nimi_sv, koulutusluokitus2016koulutusastetaso1nimi_en, t.nimi))) AS koulutusluokitus2016koulutusastetaso1nimi,
LTRIM(RTRIM(COALESCE(koulutusluokitus2016koulutusastetaso1nimi_sv, koulutusluokitus2016koulutusastetaso1nimi, koulutusluokitus2016koulutusastetaso1nimi_en, t.nimi_sv))) AS koulutusluokitus2016koulutusastetaso1nimi_sv,
LTRIM(RTRIM(COALESCE(koulutusluokitus2016koulutusastetaso1nimi_en, koulutusluokitus2016koulutusastetaso1nimi, koulutusluokitus2016koulutusastetaso1nimi_sv, t.nimi_en))) AS koulutusluokitus2016koulutusastetaso1nimi_en,
COALESCE(isced2011koulutusastetaso2koodi,t.koodi) as isced2011koulutusastetaso2koodi,
LTRIM(RTRIM(COALESCE(isced2011koulutusastetaso2nimi, isced2011koulutusastetaso2nimi_sv, isced2011koulutusastetaso2nimi_en, t.nimi))) AS isced2011koulutusastetaso2nimi,
LTRIM(RTRIM( COALESCE(isced2011koulutusastetaso2nimi_sv, isced2011koulutusastetaso2nimi, isced2011koulutusastetaso2nimi_en, t.nimi_sv))) AS isced2011koulutusastetaso2nimi_sv,
LTRIM(RTRIM( COALESCE(isced2011koulutusastetaso2nimi_en, isced2011koulutusastetaso2nimi, isced2011koulutusastetaso2nimi_sv, t.nimi_en))) AS isced2011koulutusastetaso2nimi_en,
COALESCE(koulutusluokitus2016koulutusalataso1koodi,t.koodi) as koulutusluokitus2016koulutusalataso1koodi,
COALESCE(koulutusluokitus2016koulutusalataso1nimi, koulutusluokitus2016koulutusalataso1nimi_sv, koulutusluokitus2016koulutusalataso1nimi_en, t.nimi) AS koulutusluokitus2016koulutusalataso1nimi,
COALESCE(koulutusluokitus2016koulutusalataso1nimi_sv, koulutusluokitus2016koulutusalataso1nimi, koulutusluokitus2016koulutusalataso1nimi_en, t.nimi_sv) AS koulutusluokitus2016koulutusalataso1nimi_sv,
COALESCE(koulutusluokitus2016koulutusalataso1nimi_en, koulutusluokitus2016koulutusalataso1nimi, koulutusluokitus2016koulutusalataso1nimi_sv, t.nimi_en) AS koulutusluokitus2016koulutusalataso1nimi_en,
COALESCE(koulutusluokitus2016koulutusalataso2koodi,t.koodi) as koulutusluokitus2016koulutusalataso2koodi,
COALESCE(koulutusluokitus2016koulutusalataso2nimi, koulutusluokitus2016koulutusalataso2nimi_sv, koulutusluokitus2016koulutusalataso2nimi_en, t.nimi) AS koulutusluokitus2016koulutusalataso2nimi,
COALESCE(koulutusluokitus2016koulutusalataso2nimi_sv, koulutusluokitus2016koulutusalataso2nimi, koulutusluokitus2016koulutusalataso2nimi_en, t.nimi_sv) AS koulutusluokitus2016koulutusalataso2nimi_sv,
COALESCE(koulutusluokitus2016koulutusalataso2nimi_en, koulutusluokitus2016koulutusalataso2nimi, koulutusluokitus2016koulutusalataso2nimi_sv, t.nimi_en) AS koulutusluokitus2016koulutusalataso2nimi_en,
COALESCE(koulutusluokitus2016koulutusalataso3koodi,t.koodi) as koulutusluokitus2016koulutusalataso3koodi,
COALESCE(koulutusluokitus2016koulutusalataso3nimi, koulutusluokitus2016koulutusalataso3nimi_sv, koulutusluokitus2016koulutusalataso3nimi_en, t.nimi) AS koulutusluokitus2016koulutusalataso3nimi,
COALESCE(koulutusluokitus2016koulutusalataso3nimi_sv, koulutusluokitus2016koulutusalataso3nimi, koulutusluokitus2016koulutusalataso3nimi_en, t.nimi_sv) AS koulutusluokitus2016koulutusalataso3nimi_sv,
COALESCE(koulutusluokitus2016koulutusalataso3nimi_en, koulutusluokitus2016koulutusalataso3nimi, koulutusluokitus2016koulutusalataso3nimi_sv, t.nimi_en) AS koulutusluokitus2016koulutusalataso3nimi_en,
COALESCE(okmohjauksenalakoodi,t.koodi) as okmohjauksenalakoodi,
COALESCE(okmohjauksenalanimi, okmohjauksenalanimi_sv, okmohjauksenalanimi_en, t.nimi) AS okmohjauksenalanimi,
COALESCE(okmohjauksenalanimi_sv, okmohjauksenalanimi, okmohjauksenalanimi_en, t.nimi_sv) AS okmohjauksenalanimi_sv,
COALESCE(okmohjauksenalanimi_en, okmohjauksenalanimi, okmohjauksenalanimi_sv, t.nimi_en) AS okmohjauksenalanimi_en,
--koulutusaste2002
COALESCE(koulutusaste2002koodi,t.koodi) as koulutusaste2002koodi,
COALESCE(koulutusaste2002nimi, koulutusaste2002nimi_sv, koulutusaste2002nimi_en, t.nimi) AS koulutusaste2002nimi,
COALESCE(koulutusaste2002nimi_sv, koulutusaste2002nimi, koulutusaste2002nimi_en, t.nimi_sv) AS koulutusaste2002nimi_sv,
COALESCE(koulutusaste2002nimi_en, koulutusaste2002nimi, koulutusaste2002nimi_sv, t.nimi_en) AS koulutusaste2002nimi_en,
--koulutusala2002
COALESCE(koulutusala2002koodi,t.koodi) as koulutusala2002koodi,
COALESCE(koulutusala2002nimi, koulutusala2002nimi_sv, koulutusala2002nimi_en, t.nimi) AS koulutusala2002nimi,
COALESCE(koulutusala2002nimi_sv, koulutusala2002nimi, koulutusala2002nimi_en, t.nimi_sv) AS koulutusala2002nimi_sv,
COALESCE(koulutusala2002nimi_en, koulutusala2002nimi, koulutusala2002nimi_sv, t.nimi_en) AS koulutusala2002nimi_en,
--opintoala2002
COALESCE(opintoala2002koodi,t.koodi) as opintoala2002koodi,
COALESCE(opintoala2002nimi, opintoala2002nimi_sv, opintoala2002nimi_en, t.nimi) AS opintoala2002nimi,
COALESCE(opintoala2002nimi_sv, opintoala2002nimi, opintoala2002nimi_en, t.nimi_sv) AS opintoala2002nimi_sv,
COALESCE(opintoala2002nimi_en, opintoala2002nimi, opintoala2002nimi_sv, t.nimi_en) AS opintoala2002nimi_en,
--opintoala1995
COALESCE(opintoala1995koodi,t.koodi) as opintoala1995koodi,
COALESCE(opintoala1995nimi, opintoala1995nimi_sv, opintoala1995nimi_en, t.nimi) AS opintoala1995nimi,
COALESCE(opintoala1995nimi_sv, opintoala1995nimi, opintoala1995nimi_en, t.nimi_sv) AS opintoala1995nimi_sv,
COALESCE(opintoala1995nimi_en, opintoala1995nimi, opintoala1995nimi_sv, t.nimi_en) AS opintoala1995nimi_en,
--nb! no koulutussektori in sa, yet anyway
-- but we need tutkintotyyppi for evaluating koulutussektori
-- koulutussektori code: derived from tutkintotyyppi; '10' (Muu koulutus)
-- maps to the placeholder row, anything unmapped falls back to t.koodi.
case
when s.koodi in ('001101','201100') then '1'
when tutkintotyyppikoodi='01' and s.koodi not in ('001101','201100') then '2' --01=Yleissivistävä koulutus
when tutkintotyyppikoodi='02' then '3' --Ammatilliset perustutkinnot
when tutkintotyyppikoodi='03' then '3' --Näyttötutkintoon valmistava ammatillinen lisäkoulutus
when tutkintotyyppikoodi='06' then '4' --Ammattikorkeakoulutus
when tutkintotyyppikoodi='09' then '3' --Muu ammatillinen koulutus
when tutkintotyyppikoodi='10' then (select top 1 koodi from sa.sa_koodistot where koodisto='vipunenmeta' and koodi='-1') --Muu koulutus
when tutkintotyyppikoodi='12' then '4' --Ylempi ammattikorkeakoulututkinto
when tutkintotyyppikoodi='13' then '5' --Alempi korkeakoulututkinto
when tutkintotyyppikoodi='14' then '5' --Ylempi korkeakoulututkinto
when tutkintotyyppikoodi='15' then '5' --Lisensiaatin tutkinto
when tutkintotyyppikoodi='16' then '5' --Tohtorin tutkinto
when tutkintotyyppikoodi='19' then '3' --Näyttötutkintoon valmistava ammatillinen lisäkoulutus
when tutkintotyyppikoodi='20' then '3' --Näyttötutkintoon valmistava ammatillinen lisäkoulutus
else t.koodi
end as koulutussektorikoodi,
case
when s.koodi in ('001101','201100') then 'Esi- ja perusopetus'
when tutkintotyyppikoodi='01' and s.koodi not in ('001101','201100') then 'Lukiokoulutus' --01=Yleissivistävä koulutus
when tutkintotyyppikoodi='02' then 'Ammatillinen koulutus' --Ammatilliset perustutkinnot
when tutkintotyyppikoodi='03' then 'Ammatillinen koulutus' --Näyttötutkintoon valmistava ammatillinen lisäkoulutus
when tutkintotyyppikoodi='06' then 'Ammattikorkeakoulukoulutus' --Ammattikorkeakoulutus
when tutkintotyyppikoodi='09' then 'Ammatillinen koulutus' --Muu ammatillinen koulutus
when tutkintotyyppikoodi='10' then (select top 1 nimi from sa.sa_koodistot where koodisto='vipunenmeta' and koodi='-1') --Muu koulutus
when tutkintotyyppikoodi='12' then 'Ammattikorkeakoulukoulutus' --Ylempi ammattikorkeakoulututkinto
when tutkintotyyppikoodi='13' then 'Yliopistokoulutus' --Alempi korkeakoulututkinto
when tutkintotyyppikoodi='14' then 'Yliopistokoulutus' --Ylempi korkeakoulututkinto
when tutkintotyyppikoodi='15' then 'Yliopistokoulutus' --Lisensiaatin tutkinto
when tutkintotyyppikoodi='16' then 'Yliopistokoulutus' --Tohtorin tutkinto
when tutkintotyyppikoodi='19' then 'Ammatillinen koulutus' --Näyttötutkintoon valmistava ammatillinen lisäkoulutus
when tutkintotyyppikoodi='20' then 'Ammatillinen koulutus' --Näyttötutkintoon valmistava ammatillinen lisäkoulutus
else t.nimi
end as koulutussektorinimi,
case
when s.koodi in ('001101','201100') then 'Förskoleundervisning och grundläggande utbildning'
when tutkintotyyppikoodi='01' and s.koodi not in ('001101','201100') then 'Gymnasieutbildning' --01=Yleissivistävä koulutus
when tutkintotyyppikoodi='02' then 'Yrkesutbildning' --Ammatilliset perustutkinnot
when tutkintotyyppikoodi='03' then 'Yrkesutbildning' --Näyttötutkintoon valmistava ammatillinen lisäkoulutus
when tutkintotyyppikoodi='06' then 'Yrkeshögskoleutbildning' --Ammattikorkeakoulutus
when tutkintotyyppikoodi='09' then 'Yrkesutbildning' --Muu ammatillinen koulutus
when tutkintotyyppikoodi='10' then (select top 1 nimi_sv from sa.sa_koodistot where koodisto='vipunenmeta' and koodi='-1') --Muu koulutus
when tutkintotyyppikoodi='12' then 'Yrkeshögskoleutbildning' --Ylempi ammattikorkeakoulututkinto
when tutkintotyyppikoodi='13' then 'Universitetsutbildning' --Alempi korkeakoulututkinto
when tutkintotyyppikoodi='14' then 'Universitetsutbildning' --Ylempi korkeakoulututkinto
when tutkintotyyppikoodi='15' then 'Universitetsutbildning' --Lisensiaatin tutkinto
when tutkintotyyppikoodi='16' then 'Universitetsutbildning' --Tohtorin tutkinto
when tutkintotyyppikoodi='19' then 'Yrkesutbildning' --Näyttötutkintoon valmistava ammatillinen lisäkoulutus
when tutkintotyyppikoodi='20' then 'Yrkesutbildning' --Näyttötutkintoon valmistava ammatillinen lisäkoulutus
else t.nimi_sv
end as koulutussektorinimi_sv,
case
when s.koodi in ('001101','201100') then 'Pre-primary and basic education'
when tutkintotyyppikoodi='01' and s.koodi not in ('001101','201100') then 'General upper secondary education' --01=Yleissivistävä koulutus
when tutkintotyyppikoodi='02' then 'Vocational education and training' --Ammatilliset perustutkinnot
when tutkintotyyppikoodi='03' then 'Vocational education and training' --Näyttötutkintoon valmistava ammatillinen lisäkoulutus
when tutkintotyyppikoodi='06' then 'University of applied sciences (UAS) education' --Ammattikorkeakoulutus
when tutkintotyyppikoodi='09' then 'Vocational education and training' --Muu ammatillinen koulutus
when tutkintotyyppikoodi='10' then (select top 1 nimi_en from sa.sa_koodistot where koodisto='vipunenmeta' and koodi='-1') --Muu koulutus
when tutkintotyyppikoodi='12' then 'University of applied sciences (UAS) education' --Ylempi ammattikorkeakoulututkinto
when tutkintotyyppikoodi='13' then 'University education' --Alempi korkeakoulututkinto
when tutkintotyyppikoodi='14' then 'University education' --Ylempi korkeakoulututkinto
when tutkintotyyppikoodi='15' then 'University education' --Lisensiaatin tutkinto
when tutkintotyyppikoodi='16' then 'University education' --Tohtorin tutkinto
when tutkintotyyppikoodi='19' then 'Vocational education and training' --Näyttötutkintoon valmistava ammatillinen lisäkoulutus
when tutkintotyyppikoodi='20' then 'Vocational education and training' --Näyttötutkintoon valmistava ammatillinen lisäkoulutus
else t.nimi_en
end as koulutussektorinimi_en,
-- and add tutkintotyyppi to dimension as we already have it here
COALESCE(tutkintotyyppikoodi,t.koodi) as tutkintotyyppikoodi,
COALESCE(tutkintotyyppinimi, tutkintotyyppinimi_sv, tutkintotyyppinimi_en, t.nimi) AS tutkintotyyppinimi,
COALESCE(tutkintotyyppinimi_sv, tutkintotyyppinimi, tutkintotyyppinimi_en, t.nimi_sv) AS tutkintotyyppinimi_sv,
COALESCE(tutkintotyyppinimi_en, tutkintotyyppinimi, tutkintotyyppinimi_sv, t.nimi_en) AS tutkintotyyppinimi_en,
s.source
FROM sa.sa_koulutusluokitus s
cross join sa.sa_koodistot as t
where t.koodisto='vipunenmeta' and t.koodi='-1'
) AS src
ON target.koulutusluokitus_avain = src.avain
WHEN MATCHED THEN
-- NOTE(review): the MATCHED branch refreshes names but not every *_koodi
-- column (e.g. koulutusastetaso1_koodi, koulutusalataso2/3_koodi are not
-- set here, unlike the INSERT branch) — presumably intentional since codes
-- are stable; confirm before relying on code updates via this MERGE.
UPDATE SET
koulutusluokitus_fi = src.nimi,
koulutusluokitus_sv = src.nimi_sv,
koulutusluokitus_en = src.nimi_en,
koulutusastetaso1_fi = src.koulutusluokitus2016koulutusastetaso1nimi,
koulutusastetaso1_sv = src.koulutusluokitus2016koulutusastetaso1nimi_sv,
koulutusastetaso1_en = src.koulutusluokitus2016koulutusastetaso1nimi_en,
koulutusastetaso2_fi = src.isced2011koulutusastetaso2nimi,
koulutusastetaso2_sv = src.isced2011koulutusastetaso2nimi_sv,
koulutusastetaso2_en = src.isced2011koulutusastetaso2nimi_en,
koulutusalataso1_koodi = src.koulutusluokitus2016koulutusalataso1koodi,
koulutusalataso1_fi = src.koulutusluokitus2016koulutusalataso1nimi,
koulutusalataso1_sv = src.koulutusluokitus2016koulutusalataso1nimi_sv,
koulutusalataso1_en = src.koulutusluokitus2016koulutusalataso1nimi_en,
koulutusalataso2_fi = src.koulutusluokitus2016koulutusalataso2nimi,
koulutusalataso2_sv = src.koulutusluokitus2016koulutusalataso2nimi_sv,
koulutusalataso2_en = src.koulutusluokitus2016koulutusalataso2nimi_en,
koulutusalataso3_fi = src.koulutusluokitus2016koulutusalataso3nimi,
koulutusalataso3_sv = src.koulutusluokitus2016koulutusalataso3nimi_sv,
koulutusalataso3_en = src.koulutusluokitus2016koulutusalataso3nimi_en,
okmohjauksenala_koodi = src.okmohjauksenalakoodi,
okmohjauksenala_fi = src.okmohjauksenalanimi,
okmohjauksenala_sv = src.okmohjauksenalanimi_sv,
okmohjauksenala_en = src.okmohjauksenalanimi_en,
koulutusaste2002_koodi = src.koulutusaste2002koodi,
koulutusaste2002_fi = src.koulutusaste2002nimi,
koulutusaste2002_sv = src.koulutusaste2002nimi_sv,
koulutusaste2002_en = src.koulutusaste2002nimi_en,
koulutusala2002_koodi = src.koulutusala2002koodi,
koulutusala2002_fi = src.koulutusala2002nimi,
koulutusala2002_sv = src.koulutusala2002nimi_sv,
koulutusala2002_en = src.koulutusala2002nimi_en,
opintoala2002_koodi = src.opintoala2002koodi,
opintoala2002_fi = src.opintoala2002nimi,
opintoala2002_sv = src.opintoala2002nimi_sv,
opintoala2002_en = src.opintoala2002nimi_en,
opintoala1995_koodi = src.opintoala1995koodi,
opintoala1995_fi = src.opintoala1995nimi,
opintoala1995_sv = src.opintoala1995nimi_sv,
opintoala1995_en = src.opintoala1995nimi_en,
tutkintotyyppi_koodi = src.tutkintotyyppikoodi,
tutkintotyyppi_fi = src.tutkintotyyppinimi,
tutkintotyyppi_sv = src.tutkintotyyppinimi_sv,
tutkintotyyppi_en = src.tutkintotyyppinimi_en,
koulutussektori_koodi = src.koulutussektorikoodi,
koulutussektori_fi = src.koulutussektorinimi,
koulutussektori_sv = src.koulutussektorinimi_sv,
koulutussektori_en = src.koulutussektorinimi_en, -- fix: was unqualified (resolved to src only by accident)
target.source = src.source
WHEN NOT MATCHED THEN
INSERT (
koulutusluokitus_avain,
koulutusluokitus_koodi,koulutusluokitus_fi,koulutusluokitus_sv,koulutusluokitus_en,
koulutusastetaso1_koodi,koulutusastetaso1_fi,koulutusastetaso1_sv,koulutusastetaso1_en,
koulutusastetaso2_koodi,koulutusastetaso2_fi,koulutusastetaso2_sv,koulutusastetaso2_en,
koulutusalataso1_koodi,koulutusalataso1_fi,koulutusalataso1_sv,koulutusalataso1_en,
koulutusalataso2_koodi,koulutusalataso2_fi,koulutusalataso2_sv,koulutusalataso2_en,
koulutusalataso3_koodi,koulutusalataso3_fi,koulutusalataso3_sv,koulutusalataso3_en,
okmohjauksenala_koodi,okmohjauksenala_fi,okmohjauksenala_sv,okmohjauksenala_en,
koulutussektori_koodi,koulutussektori_fi,koulutussektori_sv,koulutussektori_en,
tutkintotyyppi_koodi,tutkintotyyppi_fi,tutkintotyyppi_sv,tutkintotyyppi_en,
koulutusaste2002_koodi,koulutusaste2002_fi,koulutusaste2002_sv,koulutusaste2002_en,
koulutusala2002_koodi,koulutusala2002_fi,koulutusala2002_sv,koulutusala2002_en,
opintoala2002_koodi,opintoala2002_fi,opintoala2002_sv,opintoala2002_en,
opintoala1995_koodi,opintoala1995_fi,opintoala1995_sv,opintoala1995_en,
source
)
VALUES (
src.avain,
src.koodi, src.nimi, src.nimi_sv, src.nimi_en,
koulutusluokitus2016koulutusastetaso1koodi,koulutusluokitus2016koulutusastetaso1nimi,koulutusluokitus2016koulutusastetaso1nimi_sv,koulutusluokitus2016koulutusastetaso1nimi_en,
isced2011koulutusastetaso2koodi,isced2011koulutusastetaso2nimi,isced2011koulutusastetaso2nimi_sv,isced2011koulutusastetaso2nimi_en,
koulutusluokitus2016koulutusalataso1koodi,koulutusluokitus2016koulutusalataso1nimi,koulutusluokitus2016koulutusalataso1nimi_sv,koulutusluokitus2016koulutusalataso1nimi_en,
koulutusluokitus2016koulutusalataso2koodi,koulutusluokitus2016koulutusalataso2nimi,koulutusluokitus2016koulutusalataso2nimi_sv,koulutusluokitus2016koulutusalataso2nimi_en,
koulutusluokitus2016koulutusalataso3koodi,koulutusluokitus2016koulutusalataso3nimi,koulutusluokitus2016koulutusalataso3nimi_sv,koulutusluokitus2016koulutusalataso3nimi_en,
okmohjauksenalakoodi,okmohjauksenalanimi,okmohjauksenalanimi_sv,okmohjauksenalanimi_en,
koulutussektorikoodi,koulutussektorinimi,koulutussektorinimi_sv,koulutussektorinimi_en,
tutkintotyyppikoodi,tutkintotyyppinimi,tutkintotyyppinimi_sv,tutkintotyyppinimi_en,
koulutusaste2002koodi,koulutusaste2002nimi,koulutusaste2002nimi_sv,koulutusaste2002nimi_en,
koulutusala2002koodi,koulutusala2002nimi,koulutusala2002nimi_sv,koulutusala2002nimi_en,
opintoala2002koodi,opintoala2002nimi,opintoala2002nimi_sv,opintoala2002nimi_en,
opintoala1995koodi,opintoala1995nimi,opintoala1995nimi_sv,opintoala1995nimi_en,
src.source
);
-- Idempotently add the "jarjestys_*" (ordering) computed columns: each maps
-- the placeholder code '-1' to a high sort key ('99999', or '9999999' for
-- the 6-digit koulutusluokitus code) so unknown rows sort last, and passes
-- every other code through as varchar(10).
-- Fix: all thirteen blocks now compare against the string literal '-1'.
-- Seven blocks previously compared against integer -1, which (if the
-- *_koodi column is varchar, as the staging codes suggest) forces an
-- implicit varchar->int conversion of the whole column and fails on any
-- non-numeric code; '-1' is safe for both varchar and int columns.
IF NOT EXISTS (
SELECT * FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_SCHEMA='dw' AND TABLE_NAME='d_koulutusluokitus' AND COLUMN_NAME='jarjestys_koulutusalataso1_koodi')
BEGIN
ALTER TABLE dw.d_koulutusluokitus ADD jarjestys_koulutusalataso1_koodi AS case when koulutusalataso1_koodi = '-1' then '99999' else cast(koulutusalataso1_koodi as varchar(10)) end
END
IF NOT EXISTS (
SELECT * FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_SCHEMA='dw' AND TABLE_NAME='d_koulutusluokitus' AND COLUMN_NAME='jarjestys_koulutusalataso2_koodi')
BEGIN
ALTER TABLE dw.d_koulutusluokitus ADD jarjestys_koulutusalataso2_koodi AS case when koulutusalataso2_koodi = '-1' then '99999' else cast(koulutusalataso2_koodi as varchar(10)) end
END
IF NOT EXISTS (
SELECT * FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_SCHEMA='dw' AND TABLE_NAME='d_koulutusluokitus' AND COLUMN_NAME='jarjestys_koulutusalataso3_koodi')
BEGIN
ALTER TABLE dw.d_koulutusluokitus ADD jarjestys_koulutusalataso3_koodi AS case when koulutusalataso3_koodi = '-1' then '99999' else cast(koulutusalataso3_koodi as varchar(10)) end
END
IF NOT EXISTS (
SELECT * FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_SCHEMA='dw' AND TABLE_NAME='d_koulutusluokitus' AND COLUMN_NAME='jarjestys_koulutusastetaso1_koodi')
BEGIN
ALTER TABLE dw.d_koulutusluokitus ADD jarjestys_koulutusastetaso1_koodi AS case when koulutusastetaso1_koodi = '-1' then '99999' else cast(koulutusastetaso1_koodi as varchar(10)) end
END
IF NOT EXISTS (
SELECT * FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_SCHEMA='dw' AND TABLE_NAME='d_koulutusluokitus' AND COLUMN_NAME='jarjestys_koulutusastetaso2_koodi')
BEGIN
ALTER TABLE dw.d_koulutusluokitus ADD jarjestys_koulutusastetaso2_koodi AS case when koulutusastetaso2_koodi = '-1' then '99999' else cast(koulutusastetaso2_koodi as varchar(10)) end
END
IF NOT EXISTS (
SELECT * FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_SCHEMA='dw' AND TABLE_NAME='d_koulutusluokitus' AND COLUMN_NAME='jarjestys_koulutusluokitus_koodi')
BEGIN
-- 7-digit sentinel here: koulutusluokitus codes are longer than the level codes
ALTER TABLE dw.d_koulutusluokitus ADD jarjestys_koulutusluokitus_koodi AS case when koulutusluokitus_koodi = '-1' then '9999999' else cast(koulutusluokitus_koodi as varchar(10)) end
END
IF NOT EXISTS (
SELECT * FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_SCHEMA='dw' AND TABLE_NAME='d_koulutusluokitus' AND COLUMN_NAME='jarjestys_koulutussektori_koodi')
BEGIN
ALTER TABLE dw.d_koulutusluokitus ADD jarjestys_koulutussektori_koodi AS case when koulutussektori_koodi = '-1' then '99999' else cast(koulutussektori_koodi as varchar(10)) end
END
IF NOT EXISTS (
SELECT * FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_SCHEMA='dw' AND TABLE_NAME='d_koulutusluokitus' AND COLUMN_NAME='jarjestys_okmohjauksenala_koodi')
BEGIN
ALTER TABLE dw.d_koulutusluokitus ADD jarjestys_okmohjauksenala_koodi AS case when okmohjauksenala_koodi = '-1' then '99999' else cast(okmohjauksenala_koodi as varchar(10)) end
END
IF NOT EXISTS (
SELECT * FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_SCHEMA='dw' AND TABLE_NAME='d_koulutusluokitus' AND COLUMN_NAME='jarjestys_tutkintotyyppi_koodi')
BEGIN
ALTER TABLE dw.d_koulutusluokitus ADD jarjestys_tutkintotyyppi_koodi AS case when tutkintotyyppi_koodi = '-1' then '99999' else cast(tutkintotyyppi_koodi as varchar(10)) end
END
IF NOT EXISTS (
SELECT * FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_SCHEMA='dw' AND TABLE_NAME='d_koulutusluokitus' AND COLUMN_NAME='jarjestys_koulutusaste2002_koodi')
BEGIN
ALTER TABLE dw.d_koulutusluokitus ADD jarjestys_koulutusaste2002_koodi AS case when koulutusaste2002_koodi = '-1' then '99999' else cast(koulutusaste2002_koodi as varchar(10)) end
END
IF NOT EXISTS (
SELECT * FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_SCHEMA='dw' AND TABLE_NAME='d_koulutusluokitus' AND COLUMN_NAME='jarjestys_koulutusala2002_koodi')
BEGIN
ALTER TABLE dw.d_koulutusluokitus ADD jarjestys_koulutusala2002_koodi AS case when koulutusala2002_koodi = '-1' then '99999' else cast(koulutusala2002_koodi as varchar(10)) end
END
IF NOT EXISTS (
SELECT * FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_SCHEMA='dw' AND TABLE_NAME='d_koulutusluokitus' AND COLUMN_NAME='jarjestys_opintoala2002_koodi')
BEGIN
ALTER TABLE dw.d_koulutusluokitus ADD jarjestys_opintoala2002_koodi AS case when opintoala2002_koodi = '-1' then '99999' else cast(opintoala2002_koodi as varchar(10)) end
END
IF NOT EXISTS (
SELECT * FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_SCHEMA='dw' AND TABLE_NAME='d_koulutusluokitus' AND COLUMN_NAME='jarjestys_opintoala1995_koodi')
BEGIN
ALTER TABLE dw.d_koulutusluokitus ADD jarjestys_opintoala1995_koodi AS case when opintoala1995_koodi = '-1' then '99999' else cast(opintoala1995_koodi as varchar(10)) end
END
|
-- Homolog source databases (one row per imported homology database).
-- Sequence supplies DATABASE_ID values.
CREATE SEQUENCE IF NOT EXISTS CATGENOME.S_HOMOLOG_DATABASE START WITH 1 INCREMENT BY 1;
CREATE TABLE IF NOT EXISTS CATGENOME.HOMOLOG_DATABASE (
    DATABASE_ID BIGINT NOT NULL PRIMARY KEY,
    NAME VARCHAR(500) NOT NULL,
    PATH VARCHAR NOT NULL -- presumably a filesystem/location path for the database; confirm against loader code
);
-- Homolog groups: each group belongs to one source database and records a
-- primary gene (and its taxon) plus a numeric TYPE discriminator.
CREATE SEQUENCE IF NOT EXISTS CATGENOME.S_HOMOLOG_GROUP START WITH 1 INCREMENT BY 1;
CREATE TABLE IF NOT EXISTS CATGENOME.HOMOLOG_GROUP (
    GROUP_ID BIGINT NOT NULL PRIMARY KEY,
    PRIMARY_GENE_ID BIGINT NOT NULL,
    PRIMARY_GENE_TAX_ID BIGINT NOT NULL,
    TYPE BIGINT NOT NULL, -- group type code; semantics defined by the application, not visible here
    DATABASE_ID BIGINT NOT NULL,
    CONSTRAINT group_database_id_fkey FOREIGN KEY (DATABASE_ID) REFERENCES CATGENOME.HOMOLOG_DATABASE(DATABASE_ID)
);
-- Group membership: one row per (group, gene) pair; indexed by GENE_ID for
-- gene-to-group lookups.
CREATE SEQUENCE IF NOT EXISTS CATGENOME.S_HOMOLOG_GROUP_GENE START WITH 1 INCREMENT BY 1;
CREATE TABLE IF NOT EXISTS CATGENOME.HOMOLOG_GROUP_GENE (
    GROUP_GENE_ID BIGINT NOT NULL PRIMARY KEY,
    GROUP_ID BIGINT NOT NULL,
    GENE_ID BIGINT NOT NULL,
    TAX_ID BIGINT NOT NULL,
    -- NOTE(review): unlike sibling tables, DATABASE_ID carries no FK to
    -- HOMOLOG_DATABASE here — possibly intentional (denormalized for load
    -- speed); adding one now could break existing loaders, so only flagged.
    DATABASE_ID BIGINT NOT NULL,
    CONSTRAINT group_id_fkey FOREIGN KEY (GROUP_ID) REFERENCES CATGENOME.HOMOLOG_GROUP(GROUP_ID)
);
-- Fix: added IF NOT EXISTS so this statement is idempotent like every other
-- CREATE in this migration; previously a re-run failed on the duplicate index.
CREATE INDEX IF NOT EXISTS HOMOLOG_GROUP_GENE_GENE_ID_IDX ON CATGENOME.HOMOLOG_GROUP_GENE(GENE_ID);
-- Gene descriptions keyed by GENE_ID (natural key, not sequence-generated
-- despite the sequence created here): symbol/title/taxon plus optional
-- protein and nucleotide accession data.
CREATE SEQUENCE IF NOT EXISTS CATGENOME.S_HOMOLOG_GENE_DESC START WITH 1 INCREMENT BY 1;
CREATE TABLE IF NOT EXISTS CATGENOME.HOMOLOG_GENE_DESC (
    GENE_ID BIGINT NOT NULL PRIMARY KEY,
    SYMBOL VARCHAR(500) NOT NULL,
    TITLE VARCHAR(500) NOT NULL,
    TAX_ID BIGINT NOT NULL,
    PROT_GI BIGINT, -- protein GI number (nullable: not every gene has protein data)
    PROT_ACC VARCHAR(500), -- protein accession
    PROT_LEN BIGINT, -- protein length
    NUC_GI BIGINT, -- nucleotide GI number
    NUC_ACC VARCHAR(500), -- nucleotide accession
    LOCUS_TAG VARCHAR(500)
);
-- Alternative names for a gene; many aliases per HOMOLOG_GENE_DESC row.
CREATE SEQUENCE IF NOT EXISTS CATGENOME.S_HOMOLOG_GENE_ALIAS START WITH 1 INCREMENT BY 1;
CREATE TABLE IF NOT EXISTS CATGENOME.HOMOLOG_GENE_ALIAS (
    ALIAS_ID BIGINT NOT NULL PRIMARY KEY,
    GENE_ID BIGINT NOT NULL,
    NAME VARCHAR(500) NOT NULL,
    CONSTRAINT alias_gene_id_fkey FOREIGN KEY (GENE_ID) REFERENCES CATGENOME.HOMOLOG_GENE_DESC(GENE_ID)
);
-- Conserved-domain annotations per gene. BEGIN/END are reserved words, hence
-- the quoted identifiers; they hold the domain's coordinate range.
CREATE SEQUENCE IF NOT EXISTS CATGENOME.S_HOMOLOG_GENE_DOMAIN START WITH 1 INCREMENT BY 1;
CREATE TABLE IF NOT EXISTS CATGENOME.HOMOLOG_GENE_DOMAIN (
    DOMAIN_ID BIGINT NOT NULL PRIMARY KEY,
    GENE_ID BIGINT NOT NULL,
    "BEGIN" BIGINT NOT NULL, -- domain start position
    "END" BIGINT NOT NULL, -- domain end position
    PSSMID BIGINT NOT NULL, -- PSSM identifier of the domain model
    CDDID VARCHAR(500) NOT NULL, -- CDD accession
    CDDNAME VARCHAR(500) NOT NULL, -- CDD display name
    CONSTRAINT domain_gene_id_fkey FOREIGN KEY (GENE_ID) REFERENCES CATGENOME.HOMOLOG_GENE_DESC(GENE_ID)
);
|
-- <gh_stars>10-100 (scrape/dataset metadata, not SQL; kept as a comment so the file parses)
-- Tool-generated aggregate: for each city, the MAX and MIN "Keyword" among
-- rows whose Keyword equals the literal 'slut'. (With the equality filter,
-- both aggregates necessarily return that same literal; kept as generated.)
SELECT
    "CityMaxCapita_1"."City" AS "City",
    MAX("CityMaxCapita_1"."Keyword") AS "TEMP(attr:Keyword:nk)(2694177860)(0)",
    MIN("CityMaxCapita_1"."Keyword") AS "TEMP(attr:Keyword:nk)(4040898713)(0)"
FROM "CityMaxCapita_1"
WHERE ("CityMaxCapita_1"."Keyword" = 'slut')
GROUP BY "CityMaxCapita_1"."City";
|
-- Terminology generated from database [SplendidCRM5_50] on 11/18/2010 1:19:34 AM.
-- Seeds the en-US country picklist (countries_dom): one insert-if-absent call
-- per country with its display order and display text.
print 'TERMINOLOGY Countries en-us';
GO
set nocount on;
GO
exec dbo.spTERMINOLOGY_InsertOnly N'United States', N'en-US', null, N'countries_dom', 1, N'United States';
exec dbo.spTERMINOLOGY_InsertOnly N'Afghanistan', N'en-US', null, N'countries_dom', 2, N'Afghanistan';
exec dbo.spTERMINOLOGY_InsertOnly N'Albania', N'en-US', null, N'countries_dom', 3, N'Albania';
exec dbo.spTERMINOLOGY_InsertOnly N'Algeria', N'en-US', null, N'countries_dom', 4, N'Algeria';
exec dbo.spTERMINOLOGY_InsertOnly N'American Samoa', N'en-US', null, N'countries_dom', 5, N'American Samoa';
exec dbo.spTERMINOLOGY_InsertOnly N'Andorra', N'en-US', null, N'countries_dom', 6, N'Andorra';
exec dbo.spTERMINOLOGY_InsertOnly N'Angola', N'en-US', null, N'countries_dom', 7, N'Angola';
exec dbo.spTERMINOLOGY_InsertOnly N'Anguilla', N'en-US', null, N'countries_dom', 8, N'Anguilla';
exec dbo.spTERMINOLOGY_InsertOnly N'Antigua', N'en-US', null, N'countries_dom', 9, N'Antigua';
exec dbo.spTERMINOLOGY_InsertOnly N'Argentina', N'en-US', null, N'countries_dom', 10, N'Argentina';
exec dbo.spTERMINOLOGY_InsertOnly N'Armenia', N'en-US', null, N'countries_dom', 11, N'Armenia';
exec dbo.spTERMINOLOGY_InsertOnly N'Aruba', N'en-US', null, N'countries_dom', 12, N'Aruba';
exec dbo.spTERMINOLOGY_InsertOnly N'Ascension Island', N'en-US', null, N'countries_dom', 13, N'Ascension Island';
exec dbo.spTERMINOLOGY_InsertOnly N'Australia', N'en-US', null, N'countries_dom', 14, N'Australia';
exec dbo.spTERMINOLOGY_InsertOnly N'Austria', N'en-US', null, N'countries_dom', 15, N'Austria';
exec dbo.spTERMINOLOGY_InsertOnly N'Azerbaijan', N'en-US', null, N'countries_dom', 16, N'Azerbaijan';
exec dbo.spTERMINOLOGY_InsertOnly N'Bahamas', N'en-US', null, N'countries_dom', 17, N'Bahamas';
exec dbo.spTERMINOLOGY_InsertOnly N'Bahrain', N'en-US', null, N'countries_dom', 18, N'Bahrain';
exec dbo.spTERMINOLOGY_InsertOnly N'Bangladesh', N'en-US', null, N'countries_dom', 19, N'Bangladesh';
exec dbo.spTERMINOLOGY_InsertOnly N'Barbados', N'en-US', null, N'countries_dom', 20, N'Barbados';
exec dbo.spTERMINOLOGY_InsertOnly N'Barbuda', N'en-US', null, N'countries_dom', 21, N'Barbuda';
exec dbo.spTERMINOLOGY_InsertOnly N'Belarus', N'en-US', null, N'countries_dom', 22, N'Belarus';
exec dbo.spTERMINOLOGY_InsertOnly N'Belgium', N'en-US', null, N'countries_dom', 23, N'Belgium';
exec dbo.spTERMINOLOGY_InsertOnly N'Belize', N'en-US', null, N'countries_dom', 24, N'Belize';
exec dbo.spTERMINOLOGY_InsertOnly N'Benin', N'en-US', null, N'countries_dom', 25, N'Benin';
exec dbo.spTERMINOLOGY_InsertOnly N'Bermuda', N'en-US', null, N'countries_dom', 26, N'Bermuda';
exec dbo.spTERMINOLOGY_InsertOnly N'Bhutan', N'en-US', null, N'countries_dom', 27, N'Bhutan';
exec dbo.spTERMINOLOGY_InsertOnly N'Bolivia', N'en-US', null, N'countries_dom', 28, N'Bolivia';
exec dbo.spTERMINOLOGY_InsertOnly N'Bosnia', N'en-US', null, N'countries_dom', 29, N'Bosnia';
exec dbo.spTERMINOLOGY_InsertOnly N'Botswana', N'en-US', null, N'countries_dom', 30, N'Botswana';
exec dbo.spTERMINOLOGY_InsertOnly N'Brazil', N'en-US', null, N'countries_dom', 31, N'Brazil';
exec dbo.spTERMINOLOGY_InsertOnly N'Brunei', N'en-US', null, N'countries_dom', 32, N'Brunei';
exec dbo.spTERMINOLOGY_InsertOnly N'Bulgaria', N'en-US', null, N'countries_dom', 33, N'Bulgaria';
exec dbo.spTERMINOLOGY_InsertOnly N'Burkina Faso', N'en-US', null, N'countries_dom', 34, N'Burkina Faso';
exec dbo.spTERMINOLOGY_InsertOnly N'Burundi', N'en-US', null, N'countries_dom', 35, N'Burundi';
exec dbo.spTERMINOLOGY_InsertOnly N'Caicos Islands', N'en-US', null, N'countries_dom', 36, N'Caicos Islands';
exec dbo.spTERMINOLOGY_InsertOnly N'Cambodia', N'en-US', null, N'countries_dom', 37, N'Cambodia';
exec dbo.spTERMINOLOGY_InsertOnly N'Cameroon', N'en-US', null, N'countries_dom', 38, N'Cameroon';
exec dbo.spTERMINOLOGY_InsertOnly N'Canada', N'en-US', null, N'countries_dom', 39, N'Canada';
exec dbo.spTERMINOLOGY_InsertOnly N'Cape Verde', N'en-US', null, N'countries_dom', 40, N'Cape Verde';
exec dbo.spTERMINOLOGY_InsertOnly N'Cayman Islands', N'en-US', null, N'countries_dom', 41, N'Cayman Islands';
exec dbo.spTERMINOLOGY_InsertOnly N'Central African Republic', N'en-US', null, N'countries_dom', 42, N'Central African Republic';
exec dbo.spTERMINOLOGY_InsertOnly N'Chad', N'en-US', null, N'countries_dom', 43, N'Chad';
exec dbo.spTERMINOLOGY_InsertOnly N'Chile', N'en-US', null, N'countries_dom', 44, N'Chile';
exec dbo.spTERMINOLOGY_InsertOnly N'China', N'en-US', null, N'countries_dom', 45, N'China';
exec dbo.spTERMINOLOGY_InsertOnly N'Cocos Islands', N'en-US', null, N'countries_dom', 46, N'Cocos Islands';
exec dbo.spTERMINOLOGY_InsertOnly N'Colombia', N'en-US', null, N'countries_dom', 47, N'Colombia';
exec dbo.spTERMINOLOGY_InsertOnly N'Comoros', N'en-US', null, N'countries_dom', 48, N'Comoros';
exec dbo.spTERMINOLOGY_InsertOnly N'Congo', N'en-US', null, N'countries_dom', 49, N'Congo';
exec dbo.spTERMINOLOGY_InsertOnly N'Cook Islands', N'en-US', null, N'countries_dom', 50, N'Cook Islands';
exec dbo.spTERMINOLOGY_InsertOnly N'Costa Rica', N'en-US', null, N'countries_dom', 51, N'Costa Rica';
exec dbo.spTERMINOLOGY_InsertOnly N'Côte d’Ivoire', N'en-US', null, N'countries_dom', 52, N'Côte d’Ivoire';
exec dbo.spTERMINOLOGY_InsertOnly N'Croatia', N'en-US', null, N'countries_dom', 53, N'Croatia';
exec dbo.spTERMINOLOGY_InsertOnly N'Cuba', N'en-US', null, N'countries_dom', 54, N'Cuba';
exec dbo.spTERMINOLOGY_InsertOnly N'Cyprus', N'en-US', null, N'countries_dom', 55, N'Cyprus';
exec dbo.spTERMINOLOGY_InsertOnly N'Czech Republic', N'en-US', null, N'countries_dom', 56, N'Czech Republic';
exec dbo.spTERMINOLOGY_InsertOnly N'Denmark', N'en-US', null, N'countries_dom', 57, N'Denmark';
exec dbo.spTERMINOLOGY_InsertOnly N'Diego Garcia', N'en-US', null, N'countries_dom', 58, N'Diego Garcia';
exec dbo.spTERMINOLOGY_InsertOnly N'Djibouti', N'en-US', null, N'countries_dom', 59, N'Djibouti';
exec dbo.spTERMINOLOGY_InsertOnly N'Dominica', N'en-US', null, N'countries_dom', 60, N'Dominica';
exec dbo.spTERMINOLOGY_InsertOnly N'Dominican Republic', N'en-US', null, N'countries_dom', 61, N'Dominican Republic';
exec dbo.spTERMINOLOGY_InsertOnly N'Ecuador', N'en-US', null, N'countries_dom', 62, N'Ecuador';
exec dbo.spTERMINOLOGY_InsertOnly N'Egypt', N'en-US', null, N'countries_dom', 63, N'Egypt';
exec dbo.spTERMINOLOGY_InsertOnly N'El Salvador', N'en-US', null, N'countries_dom', 64, N'El Salvador';
exec dbo.spTERMINOLOGY_InsertOnly N'Equatorial Guinea', N'en-US', null, N'countries_dom', 65, N'Equatorial Guinea';
exec dbo.spTERMINOLOGY_InsertOnly N'Eritrea', N'en-US', null, N'countries_dom', 66, N'Eritrea';
exec dbo.spTERMINOLOGY_InsertOnly N'Estonia', N'en-US', null, N'countries_dom', 67, N'Estonia';
exec dbo.spTERMINOLOGY_InsertOnly N'Ethiopia', N'en-US', null, N'countries_dom', 68, N'Ethiopia';
exec dbo.spTERMINOLOGY_InsertOnly N'Falkland Islands', N'en-US', null, N'countries_dom', 69, N'Falkland Islands';
exec dbo.spTERMINOLOGY_InsertOnly N'Faroe Islands', N'en-US', null, N'countries_dom', 70, N'Faroe Islands';
exec dbo.spTERMINOLOGY_InsertOnly N'Fiji Islands', N'en-US', null, N'countries_dom', 71, N'Fiji Islands';
exec dbo.spTERMINOLOGY_InsertOnly N'Finland', N'en-US', null, N'countries_dom', 72, N'Finland';
exec dbo.spTERMINOLOGY_InsertOnly N'France', N'en-US', null, N'countries_dom', 73, N'France';
exec dbo.spTERMINOLOGY_InsertOnly N'French Guiana', N'en-US', null, N'countries_dom', 74, N'French Guiana';
exec dbo.spTERMINOLOGY_InsertOnly N'French Polynesia', N'en-US', null, N'countries_dom', 75, N'French Polynesia';
exec dbo.spTERMINOLOGY_InsertOnly N'Futuna', N'en-US', null, N'countries_dom', 76, N'Futuna';
exec dbo.spTERMINOLOGY_InsertOnly N'Gabon', N'en-US', null, N'countries_dom', 77, N'Gabon';
exec dbo.spTERMINOLOGY_InsertOnly N'Gambia', N'en-US', null, N'countries_dom', 78, N'Gambia';
exec dbo.spTERMINOLOGY_InsertOnly N'Georgia', N'en-US', null, N'countries_dom', 79, N'Georgia';
exec dbo.spTERMINOLOGY_InsertOnly N'Germany', N'en-US', null, N'countries_dom', 80, N'Germany';
exec dbo.spTERMINOLOGY_InsertOnly N'Ghana', N'en-US', null, N'countries_dom', 81, N'Ghana';
exec dbo.spTERMINOLOGY_InsertOnly N'Gibraltar', N'en-US', null, N'countries_dom', 82, N'Gibraltar';
exec dbo.spTERMINOLOGY_InsertOnly N'Greece', N'en-US', null, N'countries_dom', 83, N'Greece';
exec dbo.spTERMINOLOGY_InsertOnly N'Greenland', N'en-US', null, N'countries_dom', 84, N'Greenland';
exec dbo.spTERMINOLOGY_InsertOnly N'Grenada', N'en-US', null, N'countries_dom', 85, N'Grenada';
exec dbo.spTERMINOLOGY_InsertOnly N'Grenadines', N'en-US', null, N'countries_dom', 86, N'Grenadines';
exec dbo.spTERMINOLOGY_InsertOnly N'Guadeloupe', N'en-US', null, N'countries_dom', 87, N'Guadeloupe';
exec dbo.spTERMINOLOGY_InsertOnly N'Guam', N'en-US', null, N'countries_dom', 88, N'Guam';
exec dbo.spTERMINOLOGY_InsertOnly N'Guantanamo Bay', N'en-US', null, N'countries_dom', 89, N'Guantanamo Bay';
exec dbo.spTERMINOLOGY_InsertOnly N'Guatemala', N'en-US', null, N'countries_dom', 90, N'Guatemala';
exec dbo.spTERMINOLOGY_InsertOnly N'Guinea', N'en-US', null, N'countries_dom', 91, N'Guinea';
exec dbo.spTERMINOLOGY_InsertOnly N'Guinea-Bissau', N'en-US', null, N'countries_dom', 92, N'Guinea-Bissau';
exec dbo.spTERMINOLOGY_InsertOnly N'Guyana' , N'en-US', null, N'countries_dom' , 93, N'Guyana';
exec dbo.spTERMINOLOGY_InsertOnly N'Haiti' , N'en-US', null, N'countries_dom' , 94, N'Haiti';
exec dbo.spTERMINOLOGY_InsertOnly N'Herzegovina' , N'en-US', null, N'countries_dom' , 95, N'Herzegovina';
exec dbo.spTERMINOLOGY_InsertOnly N'Honduras' , N'en-US', null, N'countries_dom' , 96, N'Honduras';
exec dbo.spTERMINOLOGY_InsertOnly N'Hong Kong' , N'en-US', null, N'countries_dom' , 97, N'Hong Kong';
exec dbo.spTERMINOLOGY_InsertOnly N'Hungary' , N'en-US', null, N'countries_dom' , 98, N'Hungary';
exec dbo.spTERMINOLOGY_InsertOnly N'Iceland' , N'en-US', null, N'countries_dom' , 99, N'Iceland';
exec dbo.spTERMINOLOGY_InsertOnly N'India' , N'en-US', null, N'countries_dom' , 100, N'India';
exec dbo.spTERMINOLOGY_InsertOnly N'Indonesia' , N'en-US', null, N'countries_dom' , 101, N'Indonesia';
exec dbo.spTERMINOLOGY_InsertOnly N'Iran' , N'en-US', null, N'countries_dom' , 102, N'Iran';
exec dbo.spTERMINOLOGY_InsertOnly N'Iraq' , N'en-US', null, N'countries_dom' , 103, N'Iraq';
exec dbo.spTERMINOLOGY_InsertOnly N'Ireland' , N'en-US', null, N'countries_dom' , 104, N'Ireland';
exec dbo.spTERMINOLOGY_InsertOnly N'Israel' , N'en-US', null, N'countries_dom' , 105, N'Israel';
exec dbo.spTERMINOLOGY_InsertOnly N'Italy' , N'en-US', null, N'countries_dom' , 106, N'Italy';
exec dbo.spTERMINOLOGY_InsertOnly N'Jamaica' , N'en-US', null, N'countries_dom' , 107, N'Jamaica';
exec dbo.spTERMINOLOGY_InsertOnly N'Japan' , N'en-US', null, N'countries_dom' , 108, N'Japan';
exec dbo.spTERMINOLOGY_InsertOnly N'Jordan' , N'en-US', null, N'countries_dom' , 109, N'Jordan';
exec dbo.spTERMINOLOGY_InsertOnly N'Kazakhstan' , N'en-US', null, N'countries_dom' , 110, N'Kazakhstan';
exec dbo.spTERMINOLOGY_InsertOnly N'Kenya' , N'en-US', null, N'countries_dom' , 111, N'Kenya';
exec dbo.spTERMINOLOGY_InsertOnly N'Kiribati' , N'en-US', null, N'countries_dom' , 112, N'Kiribati';
exec dbo.spTERMINOLOGY_InsertOnly N'Korea' , N'en-US', null, N'countries_dom' , 113, N'Korea';
exec dbo.spTERMINOLOGY_InsertOnly N'Kuwait' , N'en-US', null, N'countries_dom' , 114, N'Kuwait';
exec dbo.spTERMINOLOGY_InsertOnly N'Kyrgyzstan' , N'en-US', null, N'countries_dom' , 115, N'Kyrgyzstan';
exec dbo.spTERMINOLOGY_InsertOnly N'Laos' , N'en-US', null, N'countries_dom' , 116, N'Laos';
exec dbo.spTERMINOLOGY_InsertOnly N'Latvia' , N'en-US', null, N'countries_dom' , 117, N'Latvia';
exec dbo.spTERMINOLOGY_InsertOnly N'Lebanon' , N'en-US', null, N'countries_dom' , 118, N'Lebanon';
exec dbo.spTERMINOLOGY_InsertOnly N'Lesotho' , N'en-US', null, N'countries_dom' , 119, N'Lesotho';
exec dbo.spTERMINOLOGY_InsertOnly N'Liberia' , N'en-US', null, N'countries_dom' , 120, N'Liberia';
exec dbo.spTERMINOLOGY_InsertOnly N'Libya' , N'en-US', null, N'countries_dom' , 121, N'Libya';
exec dbo.spTERMINOLOGY_InsertOnly N'Liechtenstein' , N'en-US', null, N'countries_dom' , 122, N'Liechtenstein';
exec dbo.spTERMINOLOGY_InsertOnly N'Lithuania' , N'en-US', null, N'countries_dom' , 123, N'Lithuania';
exec dbo.spTERMINOLOGY_InsertOnly N'Luxembourg' , N'en-US', null, N'countries_dom' , 124, N'Luxembourg';
exec dbo.spTERMINOLOGY_InsertOnly N'Macao' , N'en-US', null, N'countries_dom' , 125, N'Macao';
exec dbo.spTERMINOLOGY_InsertOnly N'Macedonia' , N'en-US', null, N'countries_dom' , 126, N'Macedonia';
exec dbo.spTERMINOLOGY_InsertOnly N'Madagascar' , N'en-US', null, N'countries_dom' , 127, N'Madagascar';
exec dbo.spTERMINOLOGY_InsertOnly N'Malawi' , N'en-US', null, N'countries_dom' , 128, N'Malawi';
exec dbo.spTERMINOLOGY_InsertOnly N'Malaysia' , N'en-US', null, N'countries_dom' , 129, N'Malaysia';
exec dbo.spTERMINOLOGY_InsertOnly N'Maldives' , N'en-US', null, N'countries_dom' , 130, N'Maldives';
exec dbo.spTERMINOLOGY_InsertOnly N'Mali' , N'en-US', null, N'countries_dom' , 131, N'Mali';
exec dbo.spTERMINOLOGY_InsertOnly N'Malta' , N'en-US', null, N'countries_dom' , 132, N'Malta';
exec dbo.spTERMINOLOGY_InsertOnly N'Marshall Islands' , N'en-US', null, N'countries_dom' , 133, N'Marshall Islands';
exec dbo.spTERMINOLOGY_InsertOnly N'Martinique' , N'en-US', null, N'countries_dom' , 134, N'Martinique';
exec dbo.spTERMINOLOGY_InsertOnly N'Mauritania' , N'en-US', null, N'countries_dom' , 135, N'Mauritania';
exec dbo.spTERMINOLOGY_InsertOnly N'Mauritius' , N'en-US', null, N'countries_dom' , 136, N'Mauritius';
exec dbo.spTERMINOLOGY_InsertOnly N'Mayotte' , N'en-US', null, N'countries_dom' , 137, N'Mayotte';
exec dbo.spTERMINOLOGY_InsertOnly N'Mexico' , N'en-US', null, N'countries_dom' , 138, N'Mexico';
exec dbo.spTERMINOLOGY_InsertOnly N'Micronesia' , N'en-US', null, N'countries_dom' , 139, N'Micronesia';
exec dbo.spTERMINOLOGY_InsertOnly N'Miquelon' , N'en-US', null, N'countries_dom' , 140, N'Miquelon';
exec dbo.spTERMINOLOGY_InsertOnly N'Moldova' , N'en-US', null, N'countries_dom' , 141, N'Moldova';
exec dbo.spTERMINOLOGY_InsertOnly N'Monaco' , N'en-US', null, N'countries_dom' , 142, N'Monaco';
exec dbo.spTERMINOLOGY_InsertOnly N'Mongolia' , N'en-US', null, N'countries_dom' , 143, N'Mongolia';
exec dbo.spTERMINOLOGY_InsertOnly N'Montenegro' , N'en-US', null, N'countries_dom' , 144, N'Montenegro';
exec dbo.spTERMINOLOGY_InsertOnly N'Montserrat' , N'en-US', null, N'countries_dom' , 145, N'Montserrat';
exec dbo.spTERMINOLOGY_InsertOnly N'Morocco' , N'en-US', null, N'countries_dom' , 146, N'Morocco';
exec dbo.spTERMINOLOGY_InsertOnly N'Mozambique' , N'en-US', null, N'countries_dom' , 147, N'Mozambique';
exec dbo.spTERMINOLOGY_InsertOnly N'Myanmar' , N'en-US', null, N'countries_dom' , 148, N'Myanmar';
exec dbo.spTERMINOLOGY_InsertOnly N'Namibia' , N'en-US', null, N'countries_dom' , 149, N'Namibia';
exec dbo.spTERMINOLOGY_InsertOnly N'Nauru' , N'en-US', null, N'countries_dom' , 150, N'Nauru';
exec dbo.spTERMINOLOGY_InsertOnly N'Nepal' , N'en-US', null, N'countries_dom' , 151, N'Nepal';
exec dbo.spTERMINOLOGY_InsertOnly N'Netherlands' , N'en-US', null, N'countries_dom' , 152, N'Netherlands';
exec dbo.spTERMINOLOGY_InsertOnly N'Netherlands Antilles' , N'en-US', null, N'countries_dom' , 153, N'Netherlands Antilles';
exec dbo.spTERMINOLOGY_InsertOnly N'Nevis' , N'en-US', null, N'countries_dom' , 154, N'Nevis';
exec dbo.spTERMINOLOGY_InsertOnly N'New Caledonia' , N'en-US', null, N'countries_dom' , 155, N'New Caledonia';
exec dbo.spTERMINOLOGY_InsertOnly N'New Zealand' , N'en-US', null, N'countries_dom' , 156, N'New Zealand';
exec dbo.spTERMINOLOGY_InsertOnly N'Nicaragua' , N'en-US', null, N'countries_dom' , 157, N'Nicaragua';
exec dbo.spTERMINOLOGY_InsertOnly N'Niger' , N'en-US', null, N'countries_dom' , 158, N'Niger';
exec dbo.spTERMINOLOGY_InsertOnly N'Nigeria' , N'en-US', null, N'countries_dom' , 159, N'Nigeria';
exec dbo.spTERMINOLOGY_InsertOnly N'Niue' , N'en-US', null, N'countries_dom' , 160, N'Niue';
exec dbo.spTERMINOLOGY_InsertOnly N'Norfolk Island' , N'en-US', null, N'countries_dom' , 161, N'Norfolk Island';
exec dbo.spTERMINOLOGY_InsertOnly N'North Korea' , N'en-US', null, N'countries_dom' , 162, N'North Korea';
exec dbo.spTERMINOLOGY_InsertOnly N'Norway' , N'en-US', null, N'countries_dom' , 163, N'Norway';
exec dbo.spTERMINOLOGY_InsertOnly N'Oman' , N'en-US', null, N'countries_dom' , 164, N'Oman';
exec dbo.spTERMINOLOGY_InsertOnly N'Pakistan' , N'en-US', null, N'countries_dom' , 165, N'Pakistan';
exec dbo.spTERMINOLOGY_InsertOnly N'Palau' , N'en-US', null, N'countries_dom' , 166, N'Palau';
exec dbo.spTERMINOLOGY_InsertOnly N'Panama' , N'en-US', null, N'countries_dom' , 167, N'Panama';
exec dbo.spTERMINOLOGY_InsertOnly N'Papua New Guinea' , N'en-US', null, N'countries_dom' , 168, N'Papua New Guinea';
exec dbo.spTERMINOLOGY_InsertOnly N'Paraguay' , N'en-US', null, N'countries_dom' , 169, N'Paraguay';
exec dbo.spTERMINOLOGY_InsertOnly N'Peru' , N'en-US', null, N'countries_dom' , 170, N'Peru';
exec dbo.spTERMINOLOGY_InsertOnly N'Philippines' , N'en-US', null, N'countries_dom' , 171, N'Philippines';
exec dbo.spTERMINOLOGY_InsertOnly N'Poland' , N'en-US', null, N'countries_dom' , 172, N'Poland';
exec dbo.spTERMINOLOGY_InsertOnly N'Portugal' , N'en-US', null, N'countries_dom' , 173, N'Portugal';
exec dbo.spTERMINOLOGY_InsertOnly N'Príncipe' , N'en-US', null, N'countries_dom' , 174, N'Príncipe';
exec dbo.spTERMINOLOGY_InsertOnly N'Puerto Rico' , N'en-US', null, N'countries_dom' , 175, N'Puerto Rico';
exec dbo.spTERMINOLOGY_InsertOnly N'Qatar' , N'en-US', null, N'countries_dom' , 176, N'Qatar';
exec dbo.spTERMINOLOGY_InsertOnly N'Reunion' , N'en-US', null, N'countries_dom' , 177, N'Reunion';
exec dbo.spTERMINOLOGY_InsertOnly N'Romania' , N'en-US', null, N'countries_dom' , 178, N'Romania';
exec dbo.spTERMINOLOGY_InsertOnly N'Russia' , N'en-US', null, N'countries_dom' , 179, N'Russia';
exec dbo.spTERMINOLOGY_InsertOnly N'Rwanda' , N'en-US', null, N'countries_dom' , 180, N'Rwanda';
exec dbo.spTERMINOLOGY_InsertOnly N'Samoa' , N'en-US', null, N'countries_dom' , 181, N'Samoa';
exec dbo.spTERMINOLOGY_InsertOnly N'San Marino' , N'en-US', null, N'countries_dom' , 182, N'San Marino';
exec dbo.spTERMINOLOGY_InsertOnly N'São Tomé' , N'en-US', null, N'countries_dom' , 183, N'São Tomé';
exec dbo.spTERMINOLOGY_InsertOnly N'Saudi Arabia' , N'en-US', null, N'countries_dom' , 184, N'Saudi Arabia';
exec dbo.spTERMINOLOGY_InsertOnly N'Senegal' , N'en-US', null, N'countries_dom' , 185, N'Senegal';
exec dbo.spTERMINOLOGY_InsertOnly N'Serbia' , N'en-US', null, N'countries_dom' , 186, N'Serbia';
exec dbo.spTERMINOLOGY_InsertOnly N'Seychelles' , N'en-US', null, N'countries_dom' , 187, N'Seychelles';
exec dbo.spTERMINOLOGY_InsertOnly N'Sierra Leone' , N'en-US', null, N'countries_dom' , 188, N'Sierra Leone';
exec dbo.spTERMINOLOGY_InsertOnly N'Singapore' , N'en-US', null, N'countries_dom' , 189, N'Singapore';
exec dbo.spTERMINOLOGY_InsertOnly N'Slovakia' , N'en-US', null, N'countries_dom' , 190, N'Slovakia';
exec dbo.spTERMINOLOGY_InsertOnly N'Slovenia' , N'en-US', null, N'countries_dom' , 191, N'Slovenia';
exec dbo.spTERMINOLOGY_InsertOnly N'Solomon Islands' , N'en-US', null, N'countries_dom' , 192, N'Solomon Islands';
exec dbo.spTERMINOLOGY_InsertOnly N'Somalia' , N'en-US', null, N'countries_dom' , 193, N'Somalia';
exec dbo.spTERMINOLOGY_InsertOnly N'South Africa' , N'en-US', null, N'countries_dom' , 194, N'South Africa';
exec dbo.spTERMINOLOGY_InsertOnly N'Spain' , N'en-US', null, N'countries_dom' , 195, N'Spain';
exec dbo.spTERMINOLOGY_InsertOnly N'Sri Lanka' , N'en-US', null, N'countries_dom' , 196, N'Sri Lanka';
exec dbo.spTERMINOLOGY_InsertOnly N'St. Helena' , N'en-US', null, N'countries_dom' , 197, N'St. Helena';
exec dbo.spTERMINOLOGY_InsertOnly N'St. Kitts' , N'en-US', null, N'countries_dom' , 198, N'St. Kitts ';
exec dbo.spTERMINOLOGY_InsertOnly N'St. Lucia' , N'en-US', null, N'countries_dom' , 199, N'St. Lucia';
exec dbo.spTERMINOLOGY_InsertOnly N'St. Pierre' , N'en-US', null, N'countries_dom' , 200, N'St. Pierre';
exec dbo.spTERMINOLOGY_InsertOnly N'St. Vincent' , N'en-US', null, N'countries_dom' , 201, N'St. Vincent';
exec dbo.spTERMINOLOGY_InsertOnly N'Sudan' , N'en-US', null, N'countries_dom' , 202, N'Sudan';
exec dbo.spTERMINOLOGY_InsertOnly N'Suriname' , N'en-US', null, N'countries_dom' , 203, N'Suriname';
exec dbo.spTERMINOLOGY_InsertOnly N'Swaziland' , N'en-US', null, N'countries_dom' , 204, N'Swaziland';
exec dbo.spTERMINOLOGY_InsertOnly N'Sweden' , N'en-US', null, N'countries_dom' , 205, N'Sweden';
exec dbo.spTERMINOLOGY_InsertOnly N'Switzerland' , N'en-US', null, N'countries_dom' , 206, N'Switzerland';
exec dbo.spTERMINOLOGY_InsertOnly N'Syria' , N'en-US', null, N'countries_dom' , 207, N'Syria';
exec dbo.spTERMINOLOGY_InsertOnly N'Taiwan' , N'en-US', null, N'countries_dom' , 208, N'Taiwan';
exec dbo.spTERMINOLOGY_InsertOnly N'Tajikistan' , N'en-US', null, N'countries_dom' , 209, N'Tajikistan';
exec dbo.spTERMINOLOGY_InsertOnly N'Tanzania' , N'en-US', null, N'countries_dom' , 210, N'Tanzania';
exec dbo.spTERMINOLOGY_InsertOnly N'Thailand' , N'en-US', null, N'countries_dom' , 211, N'Thailand';
exec dbo.spTERMINOLOGY_InsertOnly N'Timor-Leste' , N'en-US', null, N'countries_dom' , 212, N'Timor-Leste';
exec dbo.spTERMINOLOGY_InsertOnly N'Tobago' , N'en-US', null, N'countries_dom' , 213, N'Tobago';
exec dbo.spTERMINOLOGY_InsertOnly N'Togo' , N'en-US', null, N'countries_dom' , 214, N'Togo';
exec dbo.spTERMINOLOGY_InsertOnly N'Tokelau' , N'en-US', null, N'countries_dom' , 215, N'Tokelau';
exec dbo.spTERMINOLOGY_InsertOnly N'Tonga' , N'en-US', null, N'countries_dom' , 216, N'Tonga';
exec dbo.spTERMINOLOGY_InsertOnly N'Trinidad' , N'en-US', null, N'countries_dom' , 217, N'Trinidad';
exec dbo.spTERMINOLOGY_InsertOnly N'Tunisia' , N'en-US', null, N'countries_dom' , 218, N'Tunisia';
exec dbo.spTERMINOLOGY_InsertOnly N'Turkey' , N'en-US', null, N'countries_dom' , 219, N'Turkey';
exec dbo.spTERMINOLOGY_InsertOnly N'Turkmenistan' , N'en-US', null, N'countries_dom' , 220, N'Turkmenistan';
exec dbo.spTERMINOLOGY_InsertOnly N'Turks' , N'en-US', null, N'countries_dom' , 221, N'Turks';
exec dbo.spTERMINOLOGY_InsertOnly N'Tuvalu' , N'en-US', null, N'countries_dom' , 222, N'Tuvalu';
exec dbo.spTERMINOLOGY_InsertOnly N'Uganda' , N'en-US', null, N'countries_dom' , 223, N'Uganda';
exec dbo.spTERMINOLOGY_InsertOnly N'Ukraine' , N'en-US', null, N'countries_dom' , 224, N'Ukraine';
exec dbo.spTERMINOLOGY_InsertOnly N'United Arab Emirates' , N'en-US', null, N'countries_dom' , 225, N'United Arab Emirates';
exec dbo.spTERMINOLOGY_InsertOnly N'United Kingdom' , N'en-US', null, N'countries_dom' , 226, N'United Kingdom';
exec dbo.spTERMINOLOGY_InsertOnly N'Uruguay' , N'en-US', null, N'countries_dom' , 227, N'Uruguay';
exec dbo.spTERMINOLOGY_InsertOnly N'Uzbekistan' , N'en-US', null, N'countries_dom' , 228, N'Uzbekistan';
exec dbo.spTERMINOLOGY_InsertOnly N'Vanuatu' , N'en-US', null, N'countries_dom' , 229, N'Vanuatu';
exec dbo.spTERMINOLOGY_InsertOnly N'Vatican City' , N'en-US', null, N'countries_dom' , 230, N'Vatican City';
exec dbo.spTERMINOLOGY_InsertOnly N'Venezuela' , N'en-US', null, N'countries_dom' , 231, N'Venezuela';
exec dbo.spTERMINOLOGY_InsertOnly N'Vietnam' , N'en-US', null, N'countries_dom' , 232, N'Vietnam';
exec dbo.spTERMINOLOGY_InsertOnly N'Virgin Islands' , N'en-US', null, N'countries_dom' , 233, N'Virgin Islands';
exec dbo.spTERMINOLOGY_InsertOnly N'Wallis' , N'en-US', null, N'countries_dom' , 234, N'Wallis';
exec dbo.spTERMINOLOGY_InsertOnly N'Yemen' , N'en-US', null, N'countries_dom' , 235, N'Yemen';
exec dbo.spTERMINOLOGY_InsertOnly N'Zambia' , N'en-US', null, N'countries_dom' , 236, N'Zambia';
exec dbo.spTERMINOLOGY_InsertOnly N'Zimbabwe' , N'en-US', null, N'countries_dom' , 237, N'Zimbabwe';
GO
set nocount off;
GO
/* -- #if Oracle
COMMIT WORK;
END;
/
-- #endif Oracle */
/* -- #if IBM_DB2
commit;
end
/
call dbo.spTERMINOLOGY_Countries_en_us()
/
call dbo.spSqlDropProcedure('spTERMINOLOGY_Countries_en_us')
/
-- #endif IBM_DB2 */
|
-- randexpr1.test
--
-- Machine-generated case from SQLite's random-expression fuzz suite:
-- the Tcl "db eval" comment mirrors the raw statement that follows.
-- The query is intentionally convoluted (nested CASEs, correlated
-- subqueries, bitwise ops) to stress the expression evaluator; do not
-- hand-simplify it, since the exact shape IS the test.
-- db eval {SELECT (case when case 19 when b then 13 else t1.e end in (select (c) from t1 union select a from t1) then 11 else (d*coalesce((select t1.b from t1 where 19 | (t1.c)+case when -17>t1.a then t1.a when (19>=13) then t1.a else t1.e end=f and (e)>a or 11 in (select e from t1 union select t1.b from t1)),t1.d*t1.e))+c+a end) FROM t1 WHERE case case when case when not (abs(t1.d | f)/abs(a)) in (d,a,t1.c) then -d when (not exists(select 1 from t1 where f>=c)) then t1.a else e end+t1.d not in (t1.b,(t1.f),11) or not exists(select 1 from t1 where not exists(select 1 from t1 where t1.b in (19,t1.c,13))) then b+t1.c when t1.d>=t1.a then t1.c else (c) end when (13) then t1.e else b end in (t1.a,t1.b,13)}
SELECT (case when case 19 when b then 13 else t1.e end in (select (c) from t1 union select a from t1) then 11 else (d*coalesce((select t1.b from t1 where 19 | (t1.c)+case when -17>t1.a then t1.a when (19>=13) then t1.a else t1.e end=f and (e)>a or 11 in (select e from t1 union select t1.b from t1)),t1.d*t1.e))+c+a end) FROM t1 WHERE case case when case when not (abs(t1.d | f)/abs(a)) in (d,a,t1.c) then -d when (not exists(select 1 from t1 where f>=c)) then t1.a else e end+t1.d not in (t1.b,(t1.f),11) or not exists(select 1 from t1 where not exists(select 1 from t1 where t1.b in (19,t1.c,13))) then b+t1.c when t1.d>=t1.a then t1.c else (c) end when (13) then t1.e else b end in (t1.a,t1.b,13)
/*
 * $Id: lastval.sql 5406 2014-10-13 20:53:18Z unsaved $
 *
 * Tests auto-variable ?
 *
 * SqlTool sets ? after every statement: the update count for DDL (0)
 * and DML (rows affected), and for a SELECT the last value fetched —
 * here 10, the final row of the DESC ordering.  Each "* if" PL
 * directive aborts the script with \q if ? does not hold the expected
 * value; the trailing checks confirm that ? survives special (\p) and
 * PL (* list) commands and that the *{?} dereference form works.
 */
CREATE TABLE t (i INT);
* if (*? != 0) \q ? variable not capturing CREATE TABLE return value
INSERT INTO t values (21);
* if (*? != 1) \q ? variable not capturing INSERT return value
INSERT INTO t values (10);
* if (*? != 1) \q ? variable not capturing INSERT return value
INSERT INTO t values (43);
* if (*? != 1) \q ? variable not capturing INSERT return value
SELECT * FROM t ORDER BY i DESC;
* if (*? != 10) \q ? variable not capturing last fetched value
\p echo some stuff
\p to verify that ? variable value is preserved
* list
* if (*? != 10) \q ? value not retained after special commands
* if (*{?} != 10) \q ? value not dereferenced with {} usage
|
<gh_stars>10-100
-- file:alter_table.sql ln:2276 expect:true
-- (single statement extracted from the PostgreSQL alter_table
--  regression file; "expect:true" marks it as expected to succeed —
--  presumably the harness parses the first comment line, so it is
--  kept byte-identical above)
DROP TABLE part_3_4
|
<reponame>sergb213/pgtoolsservice
{#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2017, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#}
-- One row per column of index {{idx}}: sort options, column
-- definition, non-default operator class, exclusion operator and
-- schema-qualified collation.
SELECT
    i.indexrelid,
    -- pg_index.indoption is a per-column bit mask:
    --   0x01 = INDOPTION_DESC, 0x02 = INDOPTION_NULLS_FIRST
    -- so 0 = ASC NULLS LAST, 1 = DESC NULLS LAST,
    --    2 = ASC NULLS FIRST, 3 = DESC NULLS FIRST.
    -- (Fixed: value 3 was truncated to 'NULLS ' and value 1 showed
    -- NULLS FIRST, contradicting the flag semantics.)
    CASE i.indoption[i.attnum - 1]
    WHEN 0 THEN ARRAY['ASC', 'NULLS LAST']
    WHEN 1 THEN ARRAY['DESC', 'NULLS LAST']
    WHEN 2 THEN ARRAY['ASC', 'NULLS FIRST']
    WHEN 3 THEN ARRAY['DESC', 'NULLS FIRST']
    ELSE ARRAY['UNKNOWN OPTION' || i.indoption[i.attnum - 1], '']
    END::text[] AS options,
    i.attnum,
    pg_get_indexdef(i.indexrelid, i.attnum, true) as attdef,
    -- report the operator class only when it is not the type default
    CASE WHEN (o.opcdefault = FALSE) THEN o.opcname ELSE null END AS opcname,
    op.oprname AS oprname,
    -- schema-qualified collation name, or empty when none applies
    CASE WHEN length(nspc.nspname) > 0 AND length(coll.collname) > 0 THEN
    concat(quote_ident(nspc.nspname), '.', quote_ident(coll.collname))
    ELSE '' END AS collnspname
FROM (
    -- expand the index into one row per key column (attnum 1..indnatts)
    SELECT
        indexrelid, i.indoption, i.indclass,
        unnest(ARRAY(SELECT generate_series(1, i.indnatts) AS n)) AS attnum
    FROM
        pg_index i
    WHERE i.indexrelid = {{idx}}::OID
) i
    LEFT JOIN pg_opclass o ON (o.oid = i.indclass[i.attnum - 1])
    LEFT OUTER JOIN pg_constraint c ON (c.conindid = i.indexrelid)
    LEFT OUTER JOIN pg_operator op ON (op.oid = c.conexclop[i.attnum])
    LEFT JOIN pg_attribute a ON (a.attrelid = i.indexrelid AND a.attnum = i.attnum)
    LEFT OUTER JOIN pg_collation coll ON a.attcollation=coll.oid
    LEFT OUTER JOIN pg_namespace nspc ON coll.collnamespace=nspc.oid
ORDER BY i.attnum;
|
--Problem 8
-- Deletes a department and everything that depends on it (project
-- links, employees, manager references), then returns the remaining
-- employee count for that department (expected 0).  Statement order
-- matters: every FK pointing at the rows to be deleted is broken first.
CREATE PROCEDURE usp_DeleteEmployeesFromDepartment (@departmentId INT)
AS
-- Departments.ManagerID must be nullable before it can be cleared below.
-- NOTE(review): DDL inside a procedure runs on every call; re-altering
-- the column to NULL is idempotent but unusual — confirm intent.
ALTER TABLE Departments
ALTER COLUMN ManagerID INT NULL
-- 1) remove project links held by the department's employees
DELETE FROM EmployeesProjects
WHERE EmployeeID IN(SELECT EmployeeID FROM Employees WHERE DepartmentID = @departmentId)
-- 2) clear manager references held BY the department's employees
UPDATE Employees
SET ManagerID = NULL
WHERE EmployeeID IN(SELECT EmployeeID FROM Employees WHERE DepartmentID = @departmentId)
-- 3) clear manager references pointing AT the department's employees
UPDATE Employees
SET ManagerID = NULL
WHERE ManagerID IN (SELECT EmployeeID FROM Employees WHERE DepartmentID = @departmentId)
-- 4) detach the department's own manager reference
UPDATE Departments
SET ManagerID = NULL
WHERE DepartmentID = @departmentId
-- 5) with all FKs broken, delete the employees, then the department
DELETE FROM Employees
WHERE DepartmentID = @departmentId
DELETE FROM Departments
WHERE DepartmentID = @departmentId
-- report how many employees remain in the deleted department
SELECT COUNT(*)
FROM Employees
WHERE DepartmentID = @departmentId
<gh_stars>0
-- ============================================================================
-- Copyright (C) 2011 <NAME> <<EMAIL>>
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
--
-- $Id: llx_pos_ticket.key.sql,v 1.1 2011-08-04 16:33:26 jmenent Exp $
-- ============================================================================
-- Enforce a one-to-one link to the invoice: at most one POS facture
-- row per fk_facture.  NOTE(review): the index name says "facnumber"
-- but it covers fk_facture — looks like a copy/paste of the naming
-- pattern; confirm against upstream before renaming.
ALTER TABLE llx_pos_facture ADD UNIQUE INDEX idx_facture_uk_facnumber (fk_facture);
-- Referential integrity towards the cash desk, place and cash-control rows.
ALTER TABLE llx_pos_facture ADD CONSTRAINT fk_facture_fk_cash FOREIGN KEY (fk_cash) REFERENCES llx_pos_cash (rowid);
ALTER TABLE llx_pos_facture ADD CONSTRAINT fk_facture_fk_place FOREIGN KEY (fk_place) REFERENCES llx_pos_places (rowid);
ALTER TABLE llx_pos_facture ADD CONSTRAINT fk_facture_fk_control_cash FOREIGN KEY (fk_control_cash) REFERENCES llx_pos_control_cash (rowid);
<filename>pkg/sql/sem/tree/testdata/pretty/create_subpartition.sql
-- NOTE(review): this statement appears to be a pretty-printer fixture
-- (testdata/pretty/); confirm the harness tolerates comments before
-- keeping this note.  List-partitioned by country, each partition
-- sub-partitioned by graduation-date range around 2017-08-15.
CREATE TABLE students (
    id SERIAL,
    name STRING,
    email STRING,
    country STRING,
    expected_graduation_date DATE,
    PRIMARY KEY (country, expected_graduation_date, id))
PARTITION BY LIST (country)(
    PARTITION australia VALUES IN ('AU','NZ') PARTITION BY RANGE (expected_graduation_date)(PARTITION graduated_au VALUES FROM (MINVALUE) TO ('2017-08-15'), PARTITION current_au VALUES FROM ('2017-08-15') TO (MAXVALUE)),
    PARTITION north_america VALUES IN ('US','CA') PARTITION BY RANGE (expected_graduation_date)(PARTITION graduated_us VALUES FROM (MINVALUE) TO ('2017-08-15'), PARTITION current_us VALUES FROM ('2017-08-15') TO (MAXVALUE))
)
|
<gh_stars>1-10
-- Drop-and-recreate guard so the script is re-runnable.
IF EXISTS (select * from dbo.sysobjects where id = object_id(N'uspGetAllActiveLanguage') AND OBJECTPROPERTY(id, N'IsProcedure') = 1)
    DROP PROCEDURE uspGetAllActiveLanguage
GO
/* uspGetAllActiveLanguage
 *
 * Returns the Ident and Name1 of every active row in Language1,
 * ordered alphabetically by Name1.  Takes no parameters.
 *
 * NOTE(review): WITH (NOLOCK) permits dirty reads — likely fine for a
 * near-static lookup list, but confirm that is intentional.
 */
CREATE PROCEDURE [dbo].[uspGetAllActiveLanguage]
AS
SET NOCOUNT ON
SELECT
    Ident,
    Name1
FROM
    Language1 WITH (NOLOCK)
WHERE
    Active = 1
ORDER BY
    Name1
GO
<reponame>bhutchinson/rice
--
-- Copyright 2005-2015 The Kuali Foundation
--
-- Licensed under the Educational Community License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.opensource.org/licenses/ecl2.php
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
--
-- Copy the Lookup/RESULTS_LIMIT parameter from the legacy KR-NS
-- namespace into KR-KRAD for every application id that does not
-- already have a KR-KRAD copy; UUID() supplies a fresh OBJ_ID per row.
-- NOTE(review): NOT IN returns no rows if any matching APPL_ID in the
-- subquery is NULL — confirm APPL_ID is NOT NULL in KRCR_PARM_T.
INSERT INTO KRCR_PARM_T (OBJ_ID, NMSPC_CD, CMPNT_CD, PARM_NM, PARM_TYP_CD, VAL, PARM_DESC_TXT, EVAL_OPRTR_CD, APPL_ID)
SELECT UUID(), 'KR-KRAD', CMPNT_CD, PARM_NM, PARM_TYP_CD, VAL, PARM_DESC_TXT, EVAL_OPRTR_CD, APPL_ID
FROM KRCR_PARM_T
WHERE NMSPC_CD = 'KR-NS'
AND CMPNT_CD = 'Lookup'
AND PARM_NM = 'RESULTS_LIMIT'
AND APPL_ID NOT IN (SELECT APPL_ID FROM KRCR_PARM_T
WHERE NMSPC_CD = 'KR-KRAD'
AND CMPNT_CD = 'Lookup'
AND PARM_NM = 'RESULTS_LIMIT')
/
-- DB/GHOST ZONES: This fix all ghost zones errors on start up
-- GHOSTZONES: Deleting ghost zones for 4.1 or 4.2 clients
-- ghost_zone is a numeric column, so compare against integer literals
-- rather than quoted strings: same rows matched, but no reliance on
-- MySQL's implicit string-to-number conversion.
DELETE FROM `game_graveyard_zone` WHERE ghost_zone IN (33, 36, 5583, 5702, 3535);
<filename>database/data/cfdi_metodos_pago.sql
-- SQLite seed data: SAT CFDI payment-method catalogue rows.
PRAGMA foreign_keys=OFF;
BEGIN TRANSACTION;
-- Columns are presumably (code, description, valid-from date,
-- valid-to date with '' meaning still valid) — TODO confirm against
-- the cfdi_metodos_pago DDL; an explicit column list would make these
-- inserts robust to schema changes.
INSERT INTO cfdi_metodos_pago VALUES('PUE','Pago en una sola exhibición','2017-01-01','');
INSERT INTO cfdi_metodos_pago VALUES('PPD','Pago en parcialidades o diferido','2017-01-01','');
COMMIT;
|
-- Tag table: one row per tag string, stamped with its creation time.
CREATE TABLE s_tag (
    id            INT          NOT NULL AUTO_INCREMENT,
    tag           VARCHAR(200) NOT NULL,
    create_tstamp TIMESTAMP    DEFAULT CURRENT_TIMESTAMP,
    PRIMARY KEY (id)
);
<reponame>ediththakkar/data-design
ALTER DATABASE egarcia262 CHARACTER SET utf8 COLLATE utf8_unicode_ci;

-- Drop child tables before their parents, otherwise the FOREIGN KEY
-- references block the drops: `like` references both profile and post,
-- and post references profile.  (Original order — profile, post,
-- `like` — fails on any re-run once the tables exist.)
DROP TABLE IF EXISTS `like`;
DROP TABLE IF EXISTS post;
DROP TABLE IF EXISTS profile;

-- One row per registered user.
CREATE TABLE profile (
	profileId BINARY (16) NOT NULL,
	profileEmail VARCHAR (120) NOT NULL,
	profileHash CHAR (97) NOT NULL,
	profileImage VARCHAR (120),
	profileName CHAR (90) NOT NULL,
	profileUsername CHAR (98) NOT NULL,
	PRIMARY KEY(profileId)
);

-- One row per post, owned by a profile.
CREATE TABLE post (
	postId BINARY (16) NOT NULL,
	postImage VARCHAR (120) NOT NULL,
	postDescription VARCHAR (120) NOT NULL,
	postTitle CHAR (90) NOT NULL,
	postProfileId BINARY (16) NOT NULL,
	INDEX (postProfileId),
	FOREIGN KEY (postProfileId) REFERENCES profile (profileId),
	PRIMARY KEY (postId)
);

-- Junction table: which profile liked which post.
CREATE TABLE `like` (
	likeProfileId BINARY (16),
	-- (This is a foreign key. Both foreign keys make the primary key)
	likePostId BINARY (16),
	-- (This is a foreign key. Both foreign keys make the primary key)
	FOREIGN KEY(likeProfileId) REFERENCES profile (profileId),
	FOREIGN KEY(likePostId) REFERENCES post (postId),
	PRIMARY KEY (likeProfileId, likePostId)
);
<filename>EdFi.Ods.Utilities.Migration/Scripts/MsSql/02Upgrade/v24_to_v25/09 Update Existing Tables/10410 AssessmentSection.sql<gh_stars>0
-- SPDX-License-Identifier: Apache-2.0
-- Licensed to the Ed-Fi Alliance under one or more agreements.
-- The Ed-Fi Alliance licenses this file to you under the Apache License, Version 2.0.
-- See the LICENSE and NOTICES files in the project root for more information.

-- Migration (v2.4 -> v2.5): replace AssessmentSection's old composite
-- Assessment key with AssessmentIdentifier + Namespace.
PRINT N'Updating Assessment key references in [edfi].[AssessmentSection]'
GO
-- Step 1: add the new key columns as NULLable so existing rows remain
-- valid until they are backfilled.
ALTER TABLE [edfi].[AssessmentSection]
    ADD
        [AssessmentIdentifier] [nvarchar] (60) NULL,
        [Namespace] [nvarchar] (255) NULL
GO
-- Step 2: backfill from the identity mapping captured earlier in the
-- migration, matched on the full old natural key.
UPDATE [edfi].[AssessmentSection]
SET
    [AssessmentIdentifier] = m.[AssessmentIdentifier],
    [Namespace] = m.[Namespace]
FROM [edfi].[AssessmentSection] a
    INNER JOIN [migration_tempdata].[AssessmentIdentityMapping] m
        ON a.[AssessmentTitle] = m.[AssessmentTitle]
        AND a.[AssessedGradeLevelDescriptorId] = m.[AssessedGradeLevelDescriptorId]
        AND a.[AcademicSubjectDescriptorId] = m.[AcademicSubjectDescriptorId]
        AND a.[Version] = m.[Version]
GO
-- Step 3: every row is populated, so tighten the columns to NOT NULL.
ALTER TABLE [edfi].[AssessmentSection]
    ALTER COLUMN [AssessmentIdentifier] [nvarchar] (60) NOT NULL
GO
ALTER TABLE [edfi].[AssessmentSection]
    ALTER COLUMN [Namespace] [nvarchar] (255) NOT NULL
GO
-- Step 4: drop the old natural-key columns now superseded by
-- AssessmentIdentifier + Namespace.
ALTER TABLE [edfi].[AssessmentSection] DROP
    COLUMN [AssessmentTitle],
    COLUMN [Version],
    COLUMN [AssessedGradeLevelDescriptorId],
    COLUMN [AcademicSubjectDescriptorId]
GO
|
-- BP 6.1D content: domain syschar: 3
INSERT INTO S_DOM
VALUES (50512,
'memleak',
'This domain tests sychronous deletion of instances.
Three instances of the Running class are created in the initialization object. One is deleted by another instance, one is deleted by itself, and the last is deleted as a final deletion state.
For an instance which deletes itself, we treat it as if it were a final deletion state and therefore, needed to test that real final deletion states still work correctly, especially if there are multiple final deletion states in the state chart.
Just before the delete self happens, an event is generated to the instance. This event should be enqueued to the instance, but the system should not attempt to deliver this event. If the event is delivered, then the test fails by invoking "LOG::LogFailure" bridge and then "ARCH:shutdown".',
0,
1);
INSERT INTO S_CDT
VALUES (524289,
0);
INSERT INTO S_DT
VALUES (524289,
50512,
'void',
'');
INSERT INTO S_CDT
VALUES (524290,
1);
INSERT INTO S_DT
VALUES (524290,
50512,
'boolean',
'');
INSERT INTO S_CDT
VALUES (524291,
2);
INSERT INTO S_DT
VALUES (524291,
50512,
'integer',
'');
INSERT INTO S_CDT
VALUES (524292,
3);
INSERT INTO S_DT
VALUES (524292,
50512,
'real',
'');
INSERT INTO S_CDT
VALUES (524293,
4);
INSERT INTO S_DT
VALUES (524293,
50512,
'string',
'');
INSERT INTO S_CDT
VALUES (524294,
5);
INSERT INTO S_DT
VALUES (524294,
50512,
'unique_id',
'');
INSERT INTO S_CDT
VALUES (524295,
6);
INSERT INTO S_DT
VALUES (524295,
50512,
'state<State_Model>',
'');
INSERT INTO S_CDT
VALUES (524296,
7);
INSERT INTO S_DT
VALUES (524296,
50512,
'same_as<Base_Attribute>',
'');
INSERT INTO S_CDT
VALUES (524297,
8);
INSERT INTO S_DT
VALUES (524297,
50512,
'inst_ref<Object>',
'');
INSERT INTO S_CDT
VALUES (524298,
9);
INSERT INTO S_DT
VALUES (524298,
50512,
'inst_ref_set<Object>',
'');
INSERT INTO S_CDT
VALUES (524299,
10);
INSERT INTO S_DT
VALUES (524299,
50512,
'inst<Event>',
'');
INSERT INTO S_CDT
VALUES (524300,
11);
INSERT INTO S_DT
VALUES (524300,
50512,
'inst<Mapping>',
'');
INSERT INTO S_CDT
VALUES (524301,
12);
INSERT INTO S_DT
VALUES (524301,
50512,
'inst_ref<Mapping>',
'');
INSERT INTO S_UDT
VALUES (524302,
524300,
1);
INSERT INTO S_DT
VALUES (524302,
50512,
'date',
'');
INSERT INTO S_UDT
VALUES (524303,
524300,
2);
INSERT INTO S_DT
VALUES (524303,
50512,
'timestamp',
'');
INSERT INTO S_UDT
VALUES (524304,
524301,
3);
INSERT INTO S_DT
VALUES (524304,
50512,
'inst_ref<Timer>',
'');
INSERT INTO S_UDT
VALUES (524305,
524294,
0);
INSERT INTO S_DT
VALUES (524305,
50512,
'arbitrary_id',
'Arbitrary ID with core data type of unique_id.');
INSERT INTO S_EE
VALUES (524289,
'Time',
'',
'TIM',
50512);
INSERT INTO S_BRG
VALUES (524289,
524289,
'current_date',
'',
1,
524302,
'',
0);
INSERT INTO S_BRG
VALUES (524290,
524289,
'create_date',
'',
1,
524302,
'',
0);
INSERT INTO S_BPARM
VALUES (524289,
524290,
'second',
524291,
0);
INSERT INTO S_BPARM
VALUES (524290,
524290,
'minute',
524291,
0);
INSERT INTO S_BPARM
VALUES (524291,
524290,
'hour',
524291,
0);
INSERT INTO S_BPARM
VALUES (524292,
524290,
'day',
524291,
0);
INSERT INTO S_BPARM
VALUES (524293,
524290,
'month',
524291,
0);
INSERT INTO S_BPARM
VALUES (524294,
524290,
'year',
524291,
0);
INSERT INTO S_BRG
VALUES (524291,
524289,
'get_second',
'',
1,
524291,
'',
0);
INSERT INTO S_BPARM
VALUES (524295,
524291,
'date',
524302,
0);
INSERT INTO S_BRG
VALUES (524292,
524289,
'get_minute',
'',
1,
524291,
'',
0);
INSERT INTO S_BPARM
VALUES (524296,
524292,
'date',
524302,
0);
INSERT INTO S_BRG
VALUES (524293,
524289,
'get_hour',
'',
1,
524291,
'',
0);
INSERT INTO S_BPARM
VALUES (524297,
524293,
'date',
524302,
0);
INSERT INTO S_BRG
VALUES (524294,
524289,
'get_day',
'',
1,
524291,
'',
0);
INSERT INTO S_BPARM
VALUES (524298,
524294,
'date',
524302,
0);
INSERT INTO S_BRG
VALUES (524295,
524289,
'get_month',
'',
1,
524291,
'',
0);
INSERT INTO S_BPARM
VALUES (524299,
524295,
'date',
524302,
0);
INSERT INTO S_BRG
VALUES (524296,
524289,
'get_year',
'',
1,
524291,
'',
0);
INSERT INTO S_BPARM
VALUES (524300,
524296,
'date',
524302,
0);
INSERT INTO S_BRG
VALUES (524297,
524289,
'current_clock',
'',
1,
524303,
'',
0);
INSERT INTO S_BRG
VALUES (524298,
524289,
'timer_start',
'',
1,
524304,
'',
0);
INSERT INTO S_BPARM
VALUES (524301,
524298,
'microseconds',
524291,
0);
INSERT INTO S_BPARM
VALUES (524302,
524298,
'event_inst',
524299,
0);
INSERT INTO S_BRG
VALUES (524299,
524289,
'timer_start_recurring',
'',
1,
524304,
'',
0);
INSERT INTO S_BPARM
VALUES (524303,
524299,
'microseconds',
524291,
0);
INSERT INTO S_BPARM
VALUES (524304,
524299,
'event_inst',
524299,
0);
INSERT INTO S_BRG
VALUES (524300,
524289,
'timer_remaining_time',
'',
1,
524291,
'',
0);
INSERT INTO S_BPARM
VALUES (524305,
524300,
'timer_inst_ref',
524304,
0);
INSERT INTO S_BRG
VALUES (524301,
524289,
'timer_reset_time',
'',
1,
524290,
'',
0);
INSERT INTO S_BPARM
VALUES (524306,
524301,
'timer_inst_ref',
524304,
0);
INSERT INTO S_BPARM
VALUES (524307,
524301,
'microseconds',
524291,
0);
INSERT INTO S_BRG
VALUES (524302,
524289,
'timer_add_time',
'',
1,
524290,
'',
0);
INSERT INTO S_BPARM
VALUES (524308,
524302,
'timer_inst_ref',
524304,
0);
INSERT INTO S_BPARM
VALUES (524309,
524302,
'microseconds',
524291,
0);
INSERT INTO S_BRG
VALUES (524303,
524289,
'timer_cancel',
'',
1,
524290,
'',
0);
INSERT INTO S_BPARM
VALUES (524310,
524303,
'timer_inst_ref',
524304,
0);
INSERT INTO S_EE
VALUES (524290,
'Architecture',
'',
'ARCH',
50512);
INSERT INTO S_BRG
VALUES (524304,
524290,
'shutdown',
'',
0,
524289,
'control stop;',
1);
INSERT INTO S_EE
VALUES (524292,
'Logging',
'',
'LOG',
50512);
INSERT INTO S_BRG
VALUES (524307,
524292,
'LogFailure',
'',
0,
524289,
'',
1);
INSERT INTO S_BPARM
VALUES (524311,
524307,
'message',
524293,
0);
INSERT INTO S_BRG
VALUES (524308,
524292,
'LogSuccess',
'',
0,
524289,
'',
1);
INSERT INTO S_BPARM
VALUES (524312,
524308,
'message',
524293,
0);
INSERT INTO GD_MD
VALUES (524289,
1,
50512,
1,
1,
0,
1,
1,
0,
12,
1599,
4181,
1.000000,
0);
INSERT INTO GD_GE
VALUES (524293,
524289,
524289,
12);
INSERT INTO GD_SHP
VALUES (524293,
1920,
1536,
2080,
1632);
INSERT INTO GD_GE
VALUES (524297,
524289,
1048578,
11);
INSERT INTO GD_SHP
VALUES (524297,
1920,
1376,
2080,
1472);
INSERT INTO GD_GE
VALUES (524307,
524289,
524290,
12);
INSERT INTO GD_SHP
VALUES (524307,
2128,
1536,
2288,
1632);
INSERT INTO GD_GE
VALUES (524312,
524289,
524292,
12);
INSERT INTO GD_SHP
VALUES (524312,
1712,
1536,
1872,
1632);
INSERT INTO GD_MD
VALUES (524290,
2,
50512,
1,
1,
0,
1,
1,
0,
12,
1600,
4200,
1.000000,
0);
INSERT INTO GD_GE
VALUES (524298,
524290,
1048578,
11);
INSERT INTO GD_SHP
VALUES (524298,
1904,
1376,
2080,
1472);
INSERT INTO GD_MD
VALUES (524291,
3,
50512,
1,
1,
0,
1,
1,
0,
12,
1600,
4200,
1.000000,
0);
INSERT INTO GD_GE
VALUES (524299,
524291,
1048578,
11);
INSERT INTO GD_SHP
VALUES (524299,
1904,
1376,
2080,
1472);
INSERT INTO GD_MD
VALUES (524292,
4,
50512,
1,
1,
0,
1,
1,
0,
12,
1600,
4200,
1.000000,
0);
INSERT INTO GD_GE
VALUES (524300,
524292,
1048578,
11);
INSERT INTO GD_SHP
VALUES (524300,
1904,
1376,
2080,
1472);
INSERT INTO S_SS
VALUES (1048578,
'memleak',
'',
'ML',
1,
50512,
1048578);
INSERT INTO O_OBJ
VALUES (1048577,
'Initialization',
1,
'INIT',
'',
1048578);
INSERT INTO O_NBATTR
VALUES (1048577,
1048577);
INSERT INTO O_BATTR
VALUES (1048577,
1048577);
INSERT INTO O_ATTR
VALUES (1048577,
1048577,
0,
'current_state',
'',
'',
'current_state',
0,
524295);
INSERT INTO SM_ISM
VALUES (1572867,
1048577);
INSERT INTO SM_SM
VALUES (1572867,
'',
3);
INSERT INTO SM_MOORE
VALUES (1572867);
INSERT INTO SM_SUPDT
VALUES (1572865,
1572867,
0);
INSERT INTO SM_STATE
VALUES (1572865,
1572867,
1572865,
'Initialization',
1,
0);
INSERT INTO SM_LEVT
VALUES (1572865,
1572867,
1572865);
INSERT INTO SM_SEVT
VALUES (1572865,
1572867,
1572865);
INSERT INTO SM_EVT
VALUES (1572865,
1572867,
1572865,
1,
'Start',
0,
'',
'INIT1',
'');
INSERT INTO SM_SEME
VALUES (1572865,
1572865,
1572867,
1572865);
INSERT INTO SM_NSTXN
VALUES (1572865,
1572867,
1572865,
1572865,
1572865);
INSERT INTO SM_TXN
VALUES (1572865,
1572867,
1572865,
1572865);
INSERT INTO SM_MOAH
VALUES (1572865,
1572867,
1572865);
INSERT INTO SM_AH
VALUES (1572865,
1572867);
INSERT INTO SM_ACT
VALUES (1572865,
1572867,
1,
'create object instance run of R;
run.id = 0;
create object instance run2 of R;
run2.id = 1;
create object instance run3 of R;
run3.id = 100;
create object instance driver of D;
generate D1:''Begin'' to driver;',
'');
INSERT INTO GD_MD
VALUES (1572865,
8,
1572867,
40,
1,
0,
1,
1,
0,
12,
1600,
4199,
1.000000,
0);
INSERT INTO GD_GE
VALUES (1572866,
1572865,
1572865,
41);
INSERT INTO GD_SHP
VALUES (1572866,
1824,
1344,
2144,
1568);
INSERT INTO GD_GE
VALUES (1572867,
1572865,
1572865,
42);
INSERT INTO GD_CON
VALUES (1572867,
1572866,
1572866,
0);
INSERT INTO GD_CTXT
VALUES (1572867,
0,
0,
0,
0,
0,
0,
1940,
1300,
2016,
1322,
0,
0,
0,
0,
0,
0,
0,
0);
INSERT INTO GD_LS
VALUES (1572880,
1572867,
1840,
1344,
1840,
1328,
0);
INSERT INTO GD_LS
VALUES (1572881,
1572867,
1840,
1328,
2112,
1328,
1572880);
INSERT INTO GD_LS
VALUES (1572882,
1572867,
2112,
1328,
2112,
1344,
1572881);
INSERT INTO O_OBJ
VALUES (1048578,
'Running',
2,
'R',
'',
1048578);
INSERT INTO O_NBATTR
VALUES (1048579,
1048578);
INSERT INTO O_BATTR
VALUES (1048579,
1048578);
INSERT INTO O_ATTR
VALUES (1048579,
1048578,
0,
'id',
'',
'',
'id',
0,
524291);
INSERT INTO O_NBATTR
VALUES (1048578,
1048578);
INSERT INTO O_BATTR
VALUES (1048578,
1048578);
INSERT INTO O_ATTR
VALUES (1048578,
1048578,
1048579,
'current_state',
'',
'',
'current_state',
0,
524295);
INSERT INTO SM_ISM
VALUES (2097156,
1048578);
INSERT INTO SM_SM
VALUES (2097156,
'',
4);
INSERT INTO SM_MOORE
VALUES (2097156);
INSERT INTO SM_EVTDI
VALUES (2097153,
2097156,
'count',
'',
524291);
INSERT INTO SM_SUPDT
VALUES (2097155,
2097156,
0);
INSERT INTO SM_SDI
VALUES (2097153,
2097155,
2097156);
INSERT INTO SM_SUPDT
VALUES (2097157,
2097156,
0);
INSERT INTO SM_STATE
VALUES (2097153,
2097156,
2097155,
'Running Delete Any',
1,
0);
INSERT INTO SM_LEVT
VALUES (2097153,
2097156,
2097155);
INSERT INTO SM_SEVT
VALUES (2097153,
2097156,
2097155);
INSERT INTO SM_EVT
VALUES (2097153,
2097156,
2097155,
1,
'Begin Run',
0,
'',
'R1',
'');
INSERT INTO SM_SEME
VALUES (2097153,
2097153,
2097156,
2097155);
INSERT INTO SM_LEVT
VALUES (2097154,
2097156,
2097155);
INSERT INTO SM_SEVT
VALUES (2097154,
2097156,
2097155);
INSERT INTO SM_EVT
VALUES (2097154,
2097156,
2097155,
2,
'Del Self',
0,
'',
'R2',
'');
INSERT INTO SM_SEME
VALUES (2097153,
2097154,
2097156,
2097155);
INSERT INTO SM_LEVT
VALUES (2097155,
2097156,
2097157);
INSERT INTO SM_SEVT
VALUES (2097155,
2097156,
2097157);
INSERT INTO SM_EVT
VALUES (2097155,
2097156,
2097157,
3,
'Final Del',
0,
'',
'R3',
'');
INSERT INTO SM_EIGN
VALUES (2097153,
2097155,
2097156,
2097157,
'');
INSERT INTO SM_SEME
VALUES (2097153,
2097155,
2097156,
2097157);
INSERT INTO SM_LEVT
VALUES (2097156,
2097156,
2097157);
INSERT INTO SM_SEVT
VALUES (2097156,
2097156,
2097157);
INSERT INTO SM_EVT
VALUES (2097156,
2097156,
2097157,
4,
'Final Del 2',
0,
'',
'R4',
'');
INSERT INTO SM_EIGN
VALUES (2097153,
2097156,
2097156,
2097157,
'');
INSERT INTO SM_SEME
VALUES (2097153,
2097156,
2097156,
2097157);
INSERT INTO SM_LEVT
VALUES (2097157,
2097156,
2097157);
INSERT INTO SM_SEVT
VALUES (2097157,
2097156,
2097157);
INSERT INTO SM_EVT
VALUES (2097157,
2097156,
2097157,
5,
'Final Del 3',
0,
'',
'R5',
'');
INSERT INTO SM_EIGN
VALUES (2097153,
2097157,
2097156,
2097157,
'');
INSERT INTO SM_SEME
VALUES (2097153,
2097157,
2097156,
2097157);
INSERT INTO SM_STATE
VALUES (2097154,
2097156,
2097155,
'Running Delete Self',
2,
0);
INSERT INTO SM_EIGN
VALUES (2097154,
2097153,
2097156,
2097155,
'');
INSERT INTO SM_SEME
VALUES (2097154,
2097153,
2097156,
2097155);
INSERT INTO SM_SEME
VALUES (2097154,
2097154,
2097156,
2097155);
INSERT INTO SM_SEME
VALUES (2097154,
2097155,
2097156,
2097157);
INSERT INTO SM_SEME
VALUES (2097154,
2097156,
2097156,
2097157);
INSERT INTO SM_SEME
VALUES (2097154,
2097157,
2097156,
2097157);
INSERT INTO SM_STATE
VALUES (2097155,
2097156,
2097157,
'Final Delete State',
3,
1);
INSERT INTO SM_EIGN
VALUES (2097155,
2097153,
2097156,
2097155,
'');
INSERT INTO SM_SEME
VALUES (2097155,
2097153,
2097156,
2097155);
INSERT INTO SM_EIGN
VALUES (2097155,
2097154,
2097156,
2097155,
'');
INSERT INTO SM_SEME
VALUES (2097155,
2097154,
2097156,
2097155);
INSERT INTO SM_EIGN
VALUES (2097155,
2097155,
2097156,
2097157,
'');
INSERT INTO SM_SEME
VALUES (2097155,
2097155,
2097156,
2097157);
INSERT INTO SM_EIGN
VALUES (2097155,
2097156,
2097156,
2097157,
'');
INSERT INTO SM_SEME
VALUES (2097155,
2097156,
2097156,
2097157);
INSERT INTO SM_EIGN
VALUES (2097155,
2097157,
2097156,
2097157,
'');
INSERT INTO SM_SEME
VALUES (2097155,
2097157,
2097156,
2097157);
INSERT INTO SM_STATE
VALUES (2097156,
2097156,
2097157,
'Final Delete State 2',
4,
1);
INSERT INTO SM_EIGN
VALUES (2097156,
2097153,
2097156,
2097155,
'');
INSERT INTO SM_SEME
VALUES (2097156,
2097153,
2097156,
2097155);
INSERT INTO SM_EIGN
VALUES (2097156,
2097154,
2097156,
2097155,
'');
INSERT INTO SM_SEME
VALUES (2097156,
2097154,
2097156,
2097155);
INSERT INTO SM_EIGN
VALUES (2097156,
2097155,
2097156,
2097157,
'');
INSERT INTO SM_SEME
VALUES (2097156,
2097155,
2097156,
2097157);
INSERT INTO SM_EIGN
VALUES (2097156,
2097156,
2097156,
2097157,
'');
INSERT INTO SM_SEME
VALUES (2097156,
2097156,
2097156,
2097157);
INSERT INTO SM_EIGN
VALUES (2097156,
2097157,
2097156,
2097157,
'');
INSERT INTO SM_SEME
VALUES (2097156,
2097157,
2097156,
2097157);
INSERT INTO SM_STATE
VALUES (2097157,
2097156,
2097157,
'Final Delete State 3',
5,
1);
INSERT INTO SM_EIGN
VALUES (2097157,
2097153,
2097156,
2097155,
'');
INSERT INTO SM_SEME
VALUES (2097157,
2097153,
2097156,
2097155);
INSERT INTO SM_EIGN
VALUES (2097157,
2097154,
2097156,
2097155,
'');
INSERT INTO SM_SEME
VALUES (2097157,
2097154,
2097156,
2097155);
INSERT INTO SM_EIGN
VALUES (2097157,
2097155,
2097156,
2097157,
'');
INSERT INTO SM_SEME
VALUES (2097157,
2097155,
2097156,
2097157);
INSERT INTO SM_EIGN
VALUES (2097157,
2097156,
2097156,
2097157,
'');
INSERT INTO SM_SEME
VALUES (2097157,
2097156,
2097156,
2097157);
INSERT INTO SM_EIGN
VALUES (2097157,
2097157,
2097156,
2097157,
'');
INSERT INTO SM_SEME
VALUES (2097157,
2097157,
2097156,
2097157);
INSERT INTO SM_NSTXN
VALUES (2097153,
2097156,
2097153,
2097153,
2097155);
INSERT INTO SM_TXN
VALUES (2097153,
2097156,
2097153,
2097155);
INSERT INTO SM_NSTXN
VALUES (2097154,
2097156,
2097153,
2097154,
2097155);
INSERT INTO SM_TXN
VALUES (2097154,
2097156,
2097154,
2097155);
INSERT INTO SM_NSTXN
VALUES (2097156,
2097156,
2097154,
2097154,
2097155);
INSERT INTO SM_TXN
VALUES (2097156,
2097156,
2097154,
2097155);
INSERT INTO SM_NSTXN
VALUES (2097157,
2097156,
2097154,
2097155,
2097157);
INSERT INTO SM_TXN
VALUES (2097157,
2097156,
2097155,
2097157);
INSERT INTO SM_NSTXN
VALUES (2097159,
2097156,
2097154,
2097156,
2097157);
INSERT INTO SM_TXN
VALUES (2097159,
2097156,
2097156,
2097157);
INSERT INTO SM_NSTXN
VALUES (2097158,
2097156,
2097154,
2097157,
2097157);
INSERT INTO SM_TXN
VALUES (2097158,
2097156,
2097157,
2097157);
INSERT INTO SM_MOAH
VALUES (2097153,
2097156,
2097153);
INSERT INTO SM_AH
VALUES (2097153,
2097156);
INSERT INTO SM_ACT
VALUES (2097153,
2097156,
1,
'if(rcvd_evt.count == 0)
select any run from instances of R where (selected.id == 100);
delete object instance run;
end if;
generate R2:''Del Self''(count:rcvd_evt.count) to self;
',
'');
INSERT INTO SM_MOAH
VALUES (2097154,
2097156,
2097154);
INSERT INTO SM_AH
VALUES (2097154,
2097156);
INSERT INTO SM_ACT
VALUES (2097154,
2097156,
1,
'if(rcvd_evt.count == 0)
generate R2:''Del Self''(count:100) to self;
delete object instance self;
select any driver from instances of D;
create event instance d of D2:''Process''(count:1) to driver;
timer = TIM::timer_start(microseconds:1000000, event_inst:d);
elif(rcvd_evt.count == 1)
generate R3:''Final Del'' to self;
else
LOG::LogFailure(message:"We transitioned to ourselves AFTER being deleted!!!");
ARCH::shutdown();
end if;
',
'');
INSERT INTO SM_MOAH
VALUES (2097155,
2097156,
2097155);
INSERT INTO SM_AH
VALUES (2097155,
2097156);
INSERT INTO SM_ACT
VALUES (2097155,
2097156,
1,
'// Can we select ourselves from the instances?
select any run from instances of R where (selected.id == 0);
if(cardinality run > 0)
LOG::LogFailure(message:"We selected a deleted instance!!");
ARCH::shutdown();
else
select any driver from instances of D;
create event instance d of D2:''Process''(count:100) to driver;
timer = TIM::timer_start(microseconds:1000000, event_inst:d);
end if;',
'');
INSERT INTO SM_MOAH
VALUES (2097156,
2097156,
2097156);
INSERT INTO SM_AH
VALUES (2097156,
2097156);
INSERT INTO SM_ACT
VALUES (2097156,
2097156,
1,
'',
'');
INSERT INTO SM_MOAH
VALUES (2097157,
2097156,
2097157);
INSERT INTO SM_AH
VALUES (2097157,
2097156);
INSERT INTO SM_ACT
VALUES (2097157,
2097156,
1,
'',
'');
INSERT INTO GD_MD
VALUES (2097153,
8,
2097156,
40,
1,
0,
1,
1,
0,
12,
1553,
3920,
0.756303,
0);
INSERT INTO GD_GE
VALUES (2097154,
2097153,
2097153,
41);
INSERT INTO GD_SHP
VALUES (2097154,
1760,
1312,
2272,
1488);
INSERT INTO GD_GE
VALUES (2097159,
2097153,
2097154,
41);
INSERT INTO GD_SHP
VALUES (2097159,
1760,
1552,
2272,
1744);
INSERT INTO GD_GE
VALUES (2097407,
2097153,
2097155,
41);
INSERT INTO GD_SHP
VALUES (2097407,
1760,
1824,
2272,
2000);
INSERT INTO GD_GE
VALUES (2097410,
2097153,
2097156,
41);
INSERT INTO GD_SHP
VALUES (2097410,
1216,
1824,
1680,
2000);
INSERT INTO GD_GE
VALUES (2097411,
2097153,
2097157,
41);
INSERT INTO GD_SHP
VALUES (2097411,
2368,
1824,
2800,
1984);
INSERT INTO GD_GE
VALUES (2097155,
2097153,
2097153,
42);
INSERT INTO GD_CON
VALUES (2097155,
2097154,
2097154,
0);
INSERT INTO GD_CTXT
VALUES (2097155,
0,
0,
0,
0,
0,
0,
1911,
1246,
2064,
1268,
1,
-6,
0,
0,
0,
0,
0,
0);
INSERT INTO GD_LS
VALUES (2097358,
2097155,
1824,
1312,
1824,
1280,
0);
INSERT INTO GD_LS
VALUES (2097359,
2097155,
1824,
1280,
2144,
1280,
2097358);
INSERT INTO GD_LS
VALUES (2097360,
2097155,
2144,
1280,
2144,
1312,
2097359);
INSERT INTO GD_GE
VALUES (2097160,
2097153,
2097154,
42);
INSERT INTO GD_CON
VALUES (2097160,
2097154,
2097159,
0);
INSERT INTO GD_CTXT
VALUES (2097160,
0,
0,
0,
0,
0,
0,
1845,
1509,
1981,
1531,
-13,
-2,
0,
0,
0,
0,
0,
0);
INSERT INTO GD_LS
VALUES (2097406,
2097160,
2000,
1488,
2000,
1552,
0);
INSERT INTO GD_GE
VALUES (2097204,
2097153,
2097156,
42);
INSERT INTO GD_CON
VALUES (2097204,
2097159,
2097159,
0);
INSERT INTO GD_CTXT
VALUES (2097204,
0,
0,
0,
0,
0,
0,
2327,
1640,
2463,
1662,
165,
1,
0,
0,
0,
0,
0,
0);
INSERT INTO GD_LS
VALUES (2097400,
2097204,
2272,
1712,
2304,
1712,
0);
INSERT INTO GD_LS
VALUES (2097401,
2097204,
2304,
1712,
2304,
1584,
2097400);
INSERT INTO GD_LS
VALUES (2097402,
2097204,
2304,
1584,
2272,
1584,
2097401);
INSERT INTO GD_GE
VALUES (2097408,
2097153,
2097157,
42);
INSERT INTO GD_CON
VALUES (2097408,
2097159,
2097407,
0);
INSERT INTO GD_CTXT
VALUES (2097408,
0,
0,
0,
0,
0,
0,
1874,
1770,
1964,
1792,
-30,
-5,
0,
0,
0,
0,
0,
0);
INSERT INTO GD_LS
VALUES (2097409,
2097408,
2000,
1744,
2000,
1824,
0);
INSERT INTO GD_GE
VALUES (2097412,
2097153,
2097158,
42);
INSERT INTO GD_CON
VALUES (2097412,
2097159,
2097411,
0);
INSERT INTO GD_CTXT
VALUES (2097412,
0,
0,
0,
0,
0,
0,
2340,
1761,
2443,
1783,
130,
-22,
0,
0,
0,
0,
0,
0);
INSERT INTO GD_LS
VALUES (2097413,
2097412,
2267,
1739,
2373,
1829,
0);
INSERT INTO GD_GE
VALUES (2097414,
2097153,
2097159,
42);
INSERT INTO GD_CON
VALUES (2097414,
2097159,
2097410,
0);
INSERT INTO GD_CTXT
VALUES (2097414,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0);
INSERT INTO GD_LS
VALUES (2097415,
2097414,
1765,
1739,
1664,
1824,
0);
INSERT INTO O_OBJ
VALUES (1048579,
'Driver',
3,
'D',
'',
1048578);
INSERT INTO O_NBATTR
VALUES (1048580,
1048579);
INSERT INTO O_BATTR
VALUES (1048580,
1048579);
INSERT INTO O_ATTR
VALUES (1048580,
1048579,
0,
'current_state',
'',
'',
'current_state',
0,
524295);
INSERT INTO SM_ISM
VALUES (2621445,
1048579);
INSERT INTO SM_SM
VALUES (2621445,
'',
5);
INSERT INTO SM_MOORE
VALUES (2621445);
INSERT INTO SM_EVTDI
VALUES (2621441,
2621445,
'count',
'',
524291);
INSERT INTO SM_SUPDT
VALUES (2621441,
2621445,
0);
INSERT INTO SM_SUPDT
VALUES (2621443,
2621445,
0);
INSERT INTO SM_SDI
VALUES (2621441,
2621443,
2621445);
INSERT INTO SM_STATE
VALUES (2621441,
2621445,
2621441,
'Begin Running',
1,
0);
INSERT INTO SM_LEVT
VALUES (2621441,
2621445,
2621441);
INSERT INTO SM_SEVT
VALUES (2621441,
2621445,
2621441);
INSERT INTO SM_EVT
VALUES (2621441,
2621445,
2621441,
1,
'Begin',
0,
'',
'D1',
'');
INSERT INTO SM_SEME
VALUES (2621441,
2621441,
2621445,
2621441);
INSERT INTO SM_LEVT
VALUES (2621442,
2621445,
2621443);
INSERT INTO SM_SEVT
VALUES (2621442,
2621445,
2621443);
INSERT INTO SM_EVT
VALUES (2621442,
2621445,
2621443,
2,
'Process',
0,
'',
'D2',
'');
INSERT INTO SM_SEME
VALUES (2621441,
2621442,
2621445,
2621443);
INSERT INTO SM_LEVT
VALUES (2621443,
2621445,
2621441);
INSERT INTO SM_SEVT
VALUES (2621443,
2621445,
2621441);
INSERT INTO SM_EVT
VALUES (2621443,
2621445,
2621441,
3,
'Shutdown',
0,
'',
'D3',
'');
INSERT INTO SM_EIGN
VALUES (2621441,
2621443,
2621445,
2621441,
'');
INSERT INTO SM_SEME
VALUES (2621441,
2621443,
2621445,
2621441);
INSERT INTO SM_STATE
VALUES (2621442,
2621445,
2621443,
'Processing',
2,
0);
INSERT INTO SM_EIGN
VALUES (2621442,
2621441,
2621445,
2621441,
'');
INSERT INTO SM_SEME
VALUES (2621442,
2621441,
2621445,
2621441);
INSERT INTO SM_SEME
VALUES (2621442,
2621442,
2621445,
2621443);
INSERT INTO SM_SEME
VALUES (2621442,
2621443,
2621445,
2621441);
INSERT INTO SM_STATE
VALUES (2621444,
2621445,
2621441,
'Shutdown',
3,
0);
INSERT INTO SM_EIGN
VALUES (2621444,
2621441,
2621445,
2621441,
'');
INSERT INTO SM_SEME
VALUES (2621444,
2621441,
2621445,
2621441);
INSERT INTO SM_EIGN
VALUES (2621444,
2621442,
2621445,
2621443,
'');
INSERT INTO SM_SEME
VALUES (2621444,
2621442,
2621445,
2621443);
INSERT INTO SM_EIGN
VALUES (2621444,
2621443,
2621445,
2621441,
'');
INSERT INTO SM_SEME
VALUES (2621444,
2621443,
2621445,
2621441);
INSERT INTO SM_NSTXN
VALUES (2621441,
2621445,
2621441,
2621441,
2621441);
INSERT INTO SM_TXN
VALUES (2621441,
2621445,
2621441,
2621441);
INSERT INTO SM_NSTXN
VALUES (2621442,
2621445,
2621441,
2621442,
2621443);
INSERT INTO SM_TXN
VALUES (2621442,
2621445,
2621442,
2621443);
INSERT INTO SM_NSTXN
VALUES (2621445,
2621445,
2621442,
2621442,
2621443);
INSERT INTO SM_TXN
VALUES (2621445,
2621445,
2621442,
2621443);
INSERT INTO SM_NSTXN
VALUES (2621446,
2621445,
2621442,
2621443,
2621441);
INSERT INTO SM_TXN
VALUES (2621446,
2621445,
2621444,
2621441);
INSERT INTO SM_MOAH
VALUES (2621441,
2621445,
2621441);
INSERT INTO SM_AH
VALUES (2621441,
2621445);
INSERT INTO SM_ACT
VALUES (2621441,
2621445,
1,
'generate D2:''Process''(count:0) to self;',
'');
INSERT INTO SM_MOAH
VALUES (2621442,
2621445,
2621442);
INSERT INTO SM_AH
VALUES (2621442,
2621445);
INSERT INTO SM_ACT
VALUES (2621442,
2621445,
1,
'if(rcvd_evt.count == 0)
select any run from instances of R where (selected.id == 0);
generate R1:''Begin Run''(count:rcvd_evt.count) to run;
elif(rcvd_evt.count == 1)
select many run_set from instances of R;
if ( ( cardinality run_set ) > 1 )
LOG::LogFailure( message:"Selection resulted in more than one instance." );
end if;
select any run from instances of R where (selected.id == 1);
generate R1:''Begin Run''(count:rcvd_evt.count) to run;
else
generate D3:''Shutdown'' to self;
end if;
',
'');
INSERT INTO SM_MOAH
VALUES (2621444,
2621445,
2621444);
INSERT INTO SM_AH
VALUES (2621444,
2621445);
INSERT INTO SM_ACT
VALUES (2621444,
2621445,
1,
'ARCH::shutdown();',
'');
INSERT INTO GD_MD
VALUES (2621441,
8,
2621445,
40,
1,
0,
1,
1,
0,
12,
1729,
4154,
0.828947,
0);
INSERT INTO GD_GE
VALUES (2621442,
2621441,
2621441,
41);
INSERT INTO GD_SHP
VALUES (2621442,
1920,
1168,
2320,
1328);
INSERT INTO GD_GE
VALUES (2621456,
2621441,
2621442,
41);
INSERT INTO GD_SHP
VALUES (2621456,
1920,
1376,
2320,
1600);
INSERT INTO GD_GE
VALUES (2621519,
2621441,
2621444,
41);
INSERT INTO GD_SHP
VALUES (2621519,
1920,
1648,
2320,
1824);
INSERT INTO GD_GE
VALUES (2621443,
2621441,
2621441,
42);
INSERT INTO GD_CON
VALUES (2621443,
2621442,
2621442,
0);
INSERT INTO GD_CTXT
VALUES (2621443,
0,
0,
0,
0,
0,
0,
2038,
1108,
2108,
1130,
-33,
0,
0,
0,
0,
0,
0,
0);
INSERT INTO GD_LS
VALUES (2621512,
2621443,
1952,
1168,
1952,
1136,
0);
INSERT INTO GD_LS
VALUES (2621513,
2621443,
1952,
1136,
2256,
1136,
2621512);
INSERT INTO GD_LS
VALUES (2621514,
2621443,
2256,
1136,
2256,
1168,
2621513);
INSERT INTO GD_GE
VALUES (2621457,
2621441,
2621442,
42);
INSERT INTO GD_CON
VALUES (2621457,
2621442,
2621456,
0);
INSERT INTO GD_CTXT
VALUES (2621457,
0,
0,
0,
0,
0,
0,
1945,
1340,
2085,
1362,
-5,
-3,
0,
0,
0,
0,
0,
0);
INSERT INTO GD_LS
VALUES (2621564,
2621457,
2096,
1328,
2096,
1376,
0);
INSERT INTO GD_GE
VALUES (2621515,
2621441,
2621445,
42);
INSERT INTO GD_CON
VALUES (2621515,
2621456,
2621456,
0);
INSERT INTO GD_CTXT
VALUES (2621515,
0,
0,
0,
0,
0,
0,
2368,
1480,
2508,
1502,
162,
1,
0,
0,
0,
0,
0,
0);
INSERT INTO GD_LS
VALUES (2621561,
2621515,
2320,
1568,
2352,
1568,
0);
INSERT INTO GD_LS
VALUES (2621562,
2621515,
2352,
1568,
2352,
1408,
2621561);
INSERT INTO GD_LS
VALUES (2621563,
2621515,
2352,
1408,
2320,
1408,
2621562);
INSERT INTO GD_GE
VALUES (2621520,
2621441,
2621446,
42);
INSERT INTO GD_CON
VALUES (2621520,
2621456,
2621519,
0);
INSERT INTO GD_CTXT
VALUES (2621520,
0,
0,
0,
0,
0,
0,
1968,
1615,
2065,
1637,
-41,
0,
0,
0,
0,
0,
0,
0);
INSERT INTO GD_LS
VALUES (2621557,
2621520,
2112,
1600,
2112,
1648,
0);
INSERT INTO GD_MD
VALUES (1048582,
5,
1048578,
11,
1,
0,
1,
1,
0,
12,
1600,
4199,
1.000000,
0);
INSERT INTO GD_GE
VALUES (1048589,
1048582,
1048577,
21);
INSERT INTO GD_SHP
VALUES (1048589,
1776,
1248,
2016,
1408);
INSERT INTO GD_GE
VALUES (1048592,
1048582,
1048578,
21);
INSERT INTO GD_SHP
VALUES (1048592,
1776,
1440,
2016,
1600);
INSERT INTO GD_GE
VALUES (1048596,
1048582,
1048579,
21);
INSERT INTO GD_SHP
VALUES (1048596,
2048,
1248,
2272,
1408);
INSERT INTO GD_MD
VALUES (1048583,
6,
1048578,
11,
1,
0,
1,
1,
0,
12,
1600,
4200,
1.000000,
0);
INSERT INTO GD_GE
VALUES (1048591,
1048583,
1048577,
21);
INSERT INTO GD_SHP
VALUES (1048591,
1776,
1248,
1968,
1312);
INSERT INTO GD_GE
VALUES (1048594,
1048583,
1048578,
21);
INSERT INTO GD_SHP
VALUES (1048594,
1776,
1440,
1968,
1504);
INSERT INTO GD_GE
VALUES (1048598,
1048583,
1048579,
21);
INSERT INTO GD_SHP
VALUES (1048598,
2048,
1248,
2240,
1312);
INSERT INTO GD_MD
VALUES (1048584,
7,
1048578,
11,
1,
0,
1,
1,
0,
12,
1600,
4200,
1.000000,
0);
INSERT INTO GD_GE
VALUES (1048590,
1048584,
1048577,
21);
INSERT INTO GD_SHP
VALUES (1048590,
1776,
1248,
1968,
1312);
INSERT INTO GD_GE
VALUES (1048593,
1048584,
1048578,
21);
INSERT INTO GD_SHP
VALUES (1048593,
1776,
1440,
1968,
1504);
INSERT INTO GD_GE
VALUES (1048597,
1048584,
1048579,
21);
INSERT INTO GD_SHP
VALUES (1048597,
2048,
1248,
2240,
1312);
-- ---------------------------------------------------------------------------
-- Source file boundary: KRVPerera/SportsMeet — MeetDataBaseGen/Scripts/meetDML.sql
-- ---------------------------------------------------------------------------
-- Seed/reset script for the sports-meet lookup tables (Districts,
-- EducationZones). Runs as a single transaction so the tables are never
-- observed half-populated.
-- NOTE(review): TRUNCATE inside an explicit transaction is rollback-safe on
-- SQL Server and Postgres but not on all engines (e.g. MySQL implicitly
-- commits) — confirm the target RDBMS before reuse.
-- assumes no foreign keys currently reference these rows — TODO confirm,
-- since TRUNCATE fails (or is disallowed) on FK-referenced tables in most
-- engines.
BEGIN TRANSACTION;
TRUNCATE TABLE Districts;
TRUNCATE TABLE EducationZones;
-- NOTE(review): despite the table name, the active rows below are the nine
-- PROVINCES of Sri Lanka; the actual 25 districts are kept commented out
-- further down. Verify that consumers of Districts expect province names.
INSERT INTO Districts (name) VALUES ('Central Province');
INSERT INTO Districts (name) VALUES ('Eastern Province');
INSERT INTO Districts (name) VALUES ('Northern Province');
INSERT INTO Districts (name) VALUES ('North Central Province');
INSERT INTO Districts (name) VALUES ('North Western Province');
INSERT INTO Districts (name) VALUES ('Sabaragamuwa Province');
INSERT INTO Districts (name) VALUES ('Southern Province');
INSERT INTO Districts (name) VALUES ('Uva Province');
INSERT INTO Districts (name) VALUES ('Western Province');
-- Disabled alternative seeding: per-district rows, retained for reference.
-- NOTE(review): 'Nuwara Eliya ' below carries a trailing space — if this list
-- is ever re-enabled, trim it first.
--INSERT INTO Districts (name) VALUES ('Galle');
--INSERT INTO Districts (name) VALUES ('Hambantota');
--INSERT INTO Districts (name) VALUES ('Matara');
--INSERT INTO Districts (name) VALUES ('Ampara');
--INSERT INTO Districts (name) VALUES ('Anuradhapura');
--INSERT INTO Districts (name) VALUES ('Badulla');
--INSERT INTO Districts (name) VALUES ('Batticaloa');
--INSERT INTO Districts (name) VALUES ('Colombo');
--INSERT INTO Districts (name) VALUES ('Gampaha');
--INSERT INTO Districts (name) VALUES ('Jaffna');
--INSERT INTO Districts (name) VALUES ('Kalutara');
--INSERT INTO Districts (name) VALUES ('Kandy');
--INSERT INTO Districts (name) VALUES ('Kegalle');
--INSERT INTO Districts (name) VALUES ('Kilinochchi');
--INSERT INTO Districts (name) VALUES ('Kurunegala');
--INSERT INTO Districts (name) VALUES ('Mannar');
--INSERT INTO Districts (name) VALUES ('Matale');
--INSERT INTO Districts (name) VALUES ('Monaragala');
--INSERT INTO Districts (name) VALUES ('Mullaitivu');
--INSERT INTO Districts (name) VALUES ('Nuwara Eliya ');
--INSERT INTO Districts (name) VALUES ('Polonnaruwa');
--INSERT INTO Districts (name) VALUES ('Puttalam');
--INSERT INTO Districts (name) VALUES ('Ratnapura');
--INSERT INTO Districts (name) VALUES ('Trincomalee');
--INSERT INTO Districts (name) VALUES ('Vavuniya');
-- Education zones, ordered roughly by province in the original export.
-- NOTE(review): several names appear misspelled relative to the official
-- zone list ('Jafna', 'Smmanthurai', 'Mulativu', 'Vauniya', 'Kalkudha') —
-- do NOT correct them here without checking, as other tables may join on
-- these exact strings.
INSERT INTO EducationZones (name) VALUES ('Piliyandala');
INSERT INTO EducationZones (name) VALUES ('Minuwangoda');
INSERT INTO EducationZones (name) VALUES ('Colombo');
INSERT INTO EducationZones (name) VALUES ('Gampaha');
INSERT INTO EducationZones (name) VALUES ('Homagama');
INSERT INTO EducationZones (name) VALUES ('Sri Jaya Pura');
INSERT INTO EducationZones (name) VALUES ('Negombo');
INSERT INTO EducationZones (name) VALUES ('Kelaniya');
INSERT INTO EducationZones (name) VALUES ('Kalutara');
INSERT INTO EducationZones (name) VALUES ('Matugama');
INSERT INTO EducationZones (name) VALUES ('Horana');
INSERT INTO EducationZones (name) VALUES ('Jafna');
INSERT INTO EducationZones (name) VALUES ('Island');
INSERT INTO EducationZones (name) VALUES ('Thenmarachchi');
INSERT INTO EducationZones (name) VALUES ('Valikamam');
INSERT INTO EducationZones (name) VALUES ('Vadamarachchi');
INSERT INTO EducationZones (name) VALUES ('Kilinochchi');
INSERT INTO EducationZones (name) VALUES ('Mannar');
INSERT INTO EducationZones (name) VALUES ('Madhu');
INSERT INTO EducationZones (name) VALUES ('Vauniya South');
INSERT INTO EducationZones (name) VALUES ('Vauniya North');
INSERT INTO EducationZones (name) VALUES ('Mulativu');
INSERT INTO EducationZones (name) VALUES ('Thunukkai');
INSERT INTO EducationZones (name) VALUES ('Anuradhapura');
INSERT INTO EducationZones (name) VALUES ('Kebithigollewa');
INSERT INTO EducationZones (name) VALUES ('Kekirawa');
INSERT INTO EducationZones (name) VALUES ('Thambuttegama');
INSERT INTO EducationZones (name) VALUES ('GalenBindunuWewa');
INSERT INTO EducationZones (name) VALUES ('Dimbulagala');
INSERT INTO EducationZones (name) VALUES ('Hingurakgoda');
INSERT INTO EducationZones (name) VALUES ('Polonnaruwa');
INSERT INTO EducationZones (name) VALUES ('Rathnapura');
INSERT INTO EducationZones (name) VALUES ('Balangoda');
INSERT INTO EducationZones (name) VALUES ('Nivithigala');
INSERT INTO EducationZones (name) VALUES ('Embilipitiya');
INSERT INTO EducationZones (name) VALUES ('Kegalla');
INSERT INTO EducationZones (name) VALUES ('Mawanella');
INSERT INTO EducationZones (name) VALUES ('Dehiovita');
INSERT INTO EducationZones (name) VALUES ('Kandy');
INSERT INTO EducationZones (name) VALUES ('Denuwara');
INSERT INTO EducationZones (name) VALUES ('Gampola');
INSERT INTO EducationZones (name) VALUES ('Teldeniya');
INSERT INTO EducationZones (name) VALUES ('Wattegama');
INSERT INTO EducationZones (name) VALUES ('Katugastota');
INSERT INTO EducationZones (name) VALUES ('Matale');
INSERT INTO EducationZones (name) VALUES ('Galewela');
INSERT INTO EducationZones (name) VALUES ('Naula');
INSERT INTO EducationZones (name) VALUES ('Wilgamuwa');
INSERT INTO EducationZones (name) VALUES ('NuwaraEliya');
INSERT INTO EducationZones (name) VALUES ('Kothmale');
INSERT INTO EducationZones (name) VALUES ('Hatton');
INSERT INTO EducationZones (name) VALUES ('Walapane');
INSERT INTO EducationZones (name) VALUES ('Hanguranketha');
INSERT INTO EducationZones (name) VALUES ('Kurunegala');
INSERT INTO EducationZones (name) VALUES ('Kuliyapitiya');
INSERT INTO EducationZones (name) VALUES ('Nikaweratiya');
INSERT INTO EducationZones (name) VALUES ('Maho');
INSERT INTO EducationZones (name) VALUES ('Giriulla');
INSERT INTO EducationZones (name) VALUES ('Ibbagamuwa');
INSERT INTO EducationZones (name) VALUES ('Puttalam');
INSERT INTO EducationZones (name) VALUES ('Chilaw');
INSERT INTO EducationZones (name) VALUES ('Galle');
INSERT INTO EducationZones (name) VALUES ('Elpitiya');
INSERT INTO EducationZones (name) VALUES ('Ambalangoda');
INSERT INTO EducationZones (name) VALUES ('Udugama');
INSERT INTO EducationZones (name) VALUES ('Matara');
INSERT INTO EducationZones (name) VALUES ('Akuressa');
INSERT INTO EducationZones (name) VALUES ('Mulatiyana(Hakmana)');
INSERT INTO EducationZones (name) VALUES ('Morawaka');
INSERT INTO EducationZones (name) VALUES ('Tangalle');
INSERT INTO EducationZones (name) VALUES ('Hambantota');
INSERT INTO EducationZones (name) VALUES ('Walasmulla');
INSERT INTO EducationZones (name) VALUES ('Badulla');
INSERT INTO EducationZones (name) VALUES ('Viyaluwa');
INSERT INTO EducationZones (name) VALUES ('Bandarawela');
INSERT INTO EducationZones (name) VALUES ('Mahiyanganaya');
INSERT INTO EducationZones (name) VALUES ('Welimada');
INSERT INTO EducationZones (name) VALUES ('Passara');
INSERT INTO EducationZones (name) VALUES ('Monaragala');
INSERT INTO EducationZones (name) VALUES ('Wellawaya');
INSERT INTO EducationZones (name) VALUES ('Bibile');
INSERT INTO EducationZones (name) VALUES ('Batticaloa');
INSERT INTO EducationZones (name) VALUES ('Kalkudha');
INSERT INTO EducationZones (name) VALUES ('Paddiruppu');
INSERT INTO EducationZones (name) VALUES ('Batticaloa Central');
INSERT INTO EducationZones (name) VALUES ('Batticaloa West');
INSERT INTO EducationZones (name) VALUES ('Ampara');
INSERT INTO EducationZones (name) VALUES ('Kalmunai');
INSERT INTO EducationZones (name) VALUES ('Smmanthurai');
INSERT INTO EducationZones (name) VALUES ('MahaOya');
INSERT INTO EducationZones (name) VALUES ('DehiattaKandiya');
INSERT INTO EducationZones (name) VALUES ('Akkaraipattu');
INSERT INTO EducationZones (name) VALUES ('Thirukkovil');
INSERT INTO EducationZones (name) VALUES ('Trincomalee');
INSERT INTO EducationZones (name) VALUES ('Mutur');
INSERT INTO EducationZones (name) VALUES ('Kantalai');
INSERT INTO EducationZones (name) VALUES ('Kinniya');
INSERT INTO EducationZones (name) VALUES ('Trincomalee North');
COMMIT TRANSACTION;
-- ---------------------------------------------------------------------------
-- Source file boundary: viswaratha12/dbwarden
-- ---------------------------------------------------------------------------
/****** Object: StoredProcedure [dbo].[UpdateManagerStatus] ******/
-- T-SQL module options: these must be issued (each in its own batch, hence
-- the GO separators) before CREATE PROCEDURE, because a procedure captures
-- the ANSI_NULLS and QUOTED_IDENTIFIER settings in effect at creation time.
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
CREATE PROCEDURE dbo.UpdateManagerStatus
/****************************************************
**
** ####################################################
** Note: This procedure is obsolete
** It has been superseded by UpdateManagerAndTaskStatus
** ####################################################
**
** Desc: Logs the current status of the given analysis manager
**
** Return values: 0: success, otherwise, error code
**
** Parameters:
**
** Auth: mem
** 03/24/2009 mem - Initial version
** 03/26/2009 mem - Added parameter @MostRecentJobInfo
** 03/31/2009 mem - Added parameter @DSScanCount
** 04/09/2009 grk - @message needs to be initialized to '' inside body of sproc
** 06/26/2009 mem - Updated to support new field names in T_Processor_Status
**
*****************************************************/
(
@ProcessorName varchar(128),
@StatusCode int, -- See T_Processor_Status_Codes; 0=Idle, 1=Running, 2=Stopped, 3=Starting, 4=Closing, 5=Retrieving Dataset, 6=Disabled, 7=FlagFileExists
@Job int,
@JobStep int,
@StepTool varchar(128),
@Dataset varchar(256),
@DurationHours real,
@Progress real,
@DSScanCount int=0, -- The total number of spectra that need to be processed (or have been generated). For Sequest, this is the DTA count
@MostRecentJobInfo varchar(256) = '',
@MostRecentLogMessage varchar(1024) = '',
@MostRecentErrorMessage varchar(1024) = '',
@message varchar(512)='' output
)
As
set nocount on
declare @myError int
declare @myRowCount int
set @myError = 0
set @myRowCount = 0
set @message = ''
declare @MgrStatusCode int
declare @TaskStatusCode int
declare @TaskDetailStatusCode int
---------------------------------------------------
-- Validate the inputs; clear the outputs
---------------------------------------------------
Set @processorName = IsNull(@processorName, '')
Set @StatusCode = IsNull(@StatusCode, 0)
Set @Job = IsNull(@Job, Null)
Set @JobStep = IsNull(@JobStep, Null)
Set @StepTool = IsNull(@StepTool, '')
Set @Dataset = IsNull(@Dataset, '')
Set @DurationHours = IsNull(@DurationHours, Null)
Set @Progress = IsNull(@Progress, Null)
Set @DSScanCount = IsNull(@DSScanCount, 0)
Set @MostRecentJobInfo = IsNull(@MostRecentJobInfo, '')
Set @MostRecentLogMessage = IsNull(@MostRecentLogMessage, '')
Set @MostRecentErrorMessage = IsNull(@MostRecentErrorMessage, '')
Set @message = ''
If Len(@processorName) = 0
Begin
Set @message = 'Processor name is empty; unable to continue'
Goto Done
End
Set @TaskStatusCode = @StatusCode
Set @TaskDetailStatusCode = 5 -- No task
If @TaskStatusCode = 0
Set @MgrStatusCode = 0
If @TaskStatusCode IN (1,2,3)
Begin
Set @MgrStatusCode = 2 -- Running
Set @TaskDetailStatusCode = 1 -- Running
End
If @TaskStatusCode = 4
Set @MgrStatusCode = 1
If @TaskStatusCode = 5
Set @MgrStatusCode = 0
-- Check whether this processor is missing from T_Processor_Status
If Not Exists (SELECT * FROM T_Processor_Status WHERE Processor_Name = @processorName)
Begin
-- Processor is missing; add it
INSERT INTO T_Processor_Status (Processor_Name, Mgr_Status_Code, Task_Status_Code, Task_Detail_Status_Code)
VALUES (@processorName, @MgrStatusCode, @TaskStatusCode, @TaskDetailStatusCode)
End
UPDATE T_Processor_Status
SET
Mgr_Status_Code = @MgrStatusCode,
Status_Date = GetDate(),
Last_Start_Time = Null,
CPU_Utilization = Null,
Free_Memory_MB = Null,
Most_Recent_Error_Message = CASE WHEN @MostRecentErrorMessage <> '' THEN @MostRecentErrorMessage ELSE Most_Recent_Error_Message END,
Step_Tool = @StepTool,
Task_Status_Code = @TaskStatusCode,
Duration_Hours = @DurationHours,
Progress = @Progress,
Current_Operation = '',
Task_Detail_Status_Code = @TaskDetailStatusCode,
Job = @Job,
Job_Step = @JobStep,
Dataset = @Dataset,
Most_Recent_Log_Message = CASE WHEN @MostRecentLogMessage <> '' THEN @MostRecentLogMessage ELSE Most_Recent_Log_Message END,
Most_Recent_Job_Info = CASE WHEN @MostRecentJobInfo <> '' THEN @MostRecentJobInfo ELSE Most_Recent_Job_Info END,
Spectrum_Count = @DSScanCount
WHERE Processor_Name = @processorName
--
SELECT @myError = @@error, @myRowCount = @@rowcount
--
if @myError <> 0
begin
set @message = 'Error Updating T_Processor_Status'
goto Done
end
---------------------------------------------------
-- Exit
---------------------------------------------------
--
Done:
--
return @myError
GO
GRANT VIEW DEFINITION ON [dbo].[UpdateManagerStatus] TO [Limited_Table_Write] AS [dbo]
GO
GRANT VIEW DEFINITION ON [dbo].[UpdateManagerStatus] TO [PNL\D3M578] AS [dbo]
GO
GRANT VIEW DEFINITION ON [dbo].[UpdateManagerStatus] TO [PNL\D3M580] AS [dbo]
GO
|
<reponame>LabKey/wnprc-modules<gh_stars>1-10
/*
 * Copyright (c) 2011-2015 LabKey Corporation
 *
 * Licensed under the Apache License, Version 2.0: http://www.apache.org/licenses/LICENSE-2.0
 *
 */
/*
 * This query provides a calendar view of feces to more easily see trends for individual monkeys.
 *
 * Inner query (fecesobs): one row per observation with the observation date
 * split into year / month-name / month-number / day-of-month parts.
 * Outer query: collapses multiple observations for the same animal on the same
 * day into one comma-separated value, then PIVOTs by day so each animal/month
 * renders as a single calendar-style row (LabKey SQL dialect).
 */
SELECT
  fecesobs.Id,
  fecesobs.year,
  fecesobs.monthName,
  -- monthNum is kept alongside monthName so callers can sort chronologically
  fecesobs.monthNum,
  fecesobs.day,
  -- DISTINCT avoids repeating the same feces code observed more than once per day
  group_concat(DISTINCT fecesobs.feces) as feces
FROM (
  SELECT
    obs.Id,
    obs.date,
    convert(year(obs.date), integer) as year,
    monthname(obs.date) AS monthName,
    convert(month(obs.date), integer) AS monthNum,
    convert(dayofmonth(obs.date), integer) as day,
    obs.feces
  FROM study."Irregular Obs No Okays" obs
  WHERE obs.feces IS NOT NULL
) fecesobs
GROUP BY fecesobs.id, fecesobs.year, fecesobs.monthName, fecesobs.monthNum, fecesobs.day
PIVOT feces BY day |
-- Game database bootstrap: a singleton global save-state table plus a
-- per-player save table. All statements are idempotent (IF NOT EXISTS).
create database if not exists `try_game_1` default charset= utf8;
use `try_game_1`;
-- Global serialized game state, keyed by `key`; `data` holds the blob payload.
create table if not exists `global` (
    `key` int(10) not null,
    `dataVersion` int(10) not null,
    `saveDate` date not null,
    `data` longblob not null,
    primary key (`key`)
) engine=InnoDB default charset= utf8 comment='全局表';
-- Per-player save data; the longblob columns hold serialized login/game/offline state.
create table if not exists `player` (
    `playerID` bigint(20) not null,
    `name` varchar(128) not null,
    `userID` int(10) not null,
    `createAreaID` int(10) not null,
    `uid` varchar(128) not null,
    `platform` varchar(128) not null,
    `appVersion` int(10) not null,
    `sourceVersion` int(10) not null,
    `createDate` date not null,
    `saveDate` date not null,
    `loginData` longblob not null,
    `data` longblob not null,
    `offlineData` longblob not null,
    primary key (`playerID`),
    -- Fixed index names: the originals embedded literal double quotes inside
    -- the backticks (e.g. KEY `"name"`), which creates indexes whose names
    -- contain '"' characters -- almost certainly unintended.
    key `name` (`name`),
    key `userID_createAreaID` (`userID`,`createAreaID`),
    key `uid` (`uid`)
) engine=InnoDB default charset= utf8 comment='角色表'; |
-- Lookup table of administrative-unit types.
-- NameAM presumably holds the Amharic (AM) translation -- TODO confirm;
-- SortOrder (default 0) drives display ordering.
CREATE TABLE [dbo].[AdminUnitType] (
    [AdminUnitTypeID] INT IDENTITY (1, 1) NOT NULL,
    [Name] NVARCHAR (50) NOT NULL,
    [NameAM] NVARCHAR (50) NULL,
    [SortOrder] INT CONSTRAINT [DF_AdminUnitType_SortOrder] DEFAULT ((0)) NOT NULL,
    CONSTRAINT [PK_AdministrativeUnitType] PRIMARY KEY CLUSTERED ([AdminUnitTypeID] ASC)
);
GO
EXECUTE sp_addextendedproperty @name = N'MS_Description', @value = N'Represents the different types of Administrative Units', @level0type = N'SCHEMA', @level0name = N'dbo', @level1type = N'TABLE', @level1name = N'AdminUnitType';
GO
EXECUTE sp_addextendedproperty @name = N'MS_Description', @value = N'Primary Key field for Administrative Unit Type.', @level0type = N'SCHEMA', @level0name = N'dbo', @level1type = N'TABLE', @level1name = N'AdminUnitType', @level2type = N'COLUMN', @level2name = N'AdminUnitTypeID';
GO
-- Fixed typo in the stored description text: 'administative' -> 'administrative'.
EXECUTE sp_addextendedproperty @name = N'MS_Description', @value = N'Name of the administrative unit type', @level0type = N'SCHEMA', @level0name = N'dbo', @level1type = N'TABLE', @level1name = N'AdminUnitType', @level2type = N'COLUMN', @level2name = N'Name';
|
<reponame>scotas/ols
-- Test schema for the Scotas OLS (Oracle + Lucene) domain index:
-- a simple mailing-list message table indexed by lucene.luceneindex.
create table emails (
emailFrom VARCHAR2(256),
emailTo VARCHAR2(256),
subject VARCHAR2(4000),
emailDate DATE,
bodyText CLOB)
;
-- Required types to test RHighlight pipeline function
-- (EMAILR mirrors the emails columns plus a leading Lucene score column;
-- EMAILRSET is the collection type returned by the pipelined function).
CREATE TYPE EMAILR AS OBJECT
(
sc NUMBER,
emailFrom VARCHAR2(256),
emailTo VARCHAR2(256),
subject VARCHAR2(4000),
emailDate DATE,
bodyText CLOB
)
;
CREATE OR REPLACE TYPE EMAILRSET AS TABLE OF EMAILR
;
-- Lucene domain index on the message body; ExtraCols maps additional table
-- columns into named Lucene fields (bodyText is the default indexed column,
-- emailFrom is indexed twice: as "dictionary" and as "emailFrom").
create index emailbodytext on emails(bodytext) indextype is lucene.luceneindex
parameters('ExtraCols:emailFrom "dictionary", emailDate "emailDate",subject "subject",emailFrom "emailFrom",emailTo "emailTo"')
;
-- Per-field storage format: positions+offsets for highlighting on "dictionary",
-- exact-match (NOT_ANALYZED) for the header-style fields.
alter index emailbodyText parameters('LogLevel:ALL;FormatCols:dictionary(ANALYZED_WITH_POSITIONS_OFFSETS), subject(NOT_ANALYZED),emailFrom(NOT_ANALYZED),emailTo(NOT_ANALYZED)')
;
-- Highlighter configuration: custom HTML formatter, up to 3 fragments of ~50
-- chars joined by "..." in highlighted output.
alter index emailbodyText parameters('Formatter:org.apache.lucene.search.highlight.MyHTMLFormatter;MaxNumFragmentsRequired:3;FragmentSeparator:...;FragmentSize:50')
;
-- Field-specific analyzers: whitespace-only tokenization for "dictionary",
-- stop-word removal for the message body.
alter index emailbodytext parameters('PerFieldAnalyzer:dictionary(org.apache.lucene.analysis.core.WhitespaceAnalyzer),BODYTEXT(org.apache.lucene.analysis.core.StopAnalyzer)')
;
-- Self-assignment of bodyText marks the row as changed for the domain index,
-- forcing a Lucene re-index when only emailDate/subject are updated.
CREATE OR REPLACE TRIGGER L$emailbodyText
BEFORE UPDATE OF emailDate,subject ON emails
FOR EACH ROW
BEGIN
:new.bodyText := :new.bodyText;
END
;
-- Plain b-tree indexes on the header columns for comparison with the
-- Lucene-indexed lookups.
create index emailFromIdx on emails(emailFrom)
;
create index emailToIdx on emails(emailTo)
;
create index emailDateIdx on emails(emaildate)
;
-- Test fixture data: eight sample messages from the lucene-user mailing list
-- (addresses/names anonymized as <EMAIL>/<NAME> placeholders). Dates are all
-- derived from one fixed GMT timestamp minus 0..7 days so date-range queries
-- against the Lucene index are deterministic.
-- Message 1: original "boosting instead of sorting" question.
insert into emails values ('<EMAIL>','<EMAIL>',
'boosting instead of sorting WAS: to boost or not to boost',to_date('FRI, 31 AUG 2007 06:28:19 GMT','DY, dd MON YYYY HH24:MI:SS "GMT"','NLS_DATE_LANGUAGE = AMERICAN')-7,
'Hi Daniel,
>> so a doc from 1973 should get a boost of 1.1973 and a doc of 1975 should
>> get a boost of 1.1975 .
>
> The boost is stored with a limited resolution. Try boosting one doc by 10,
> the other one by 20 or something like that.
You''re right. I thought that with the float values the resolution should
be good enough!
But there is only a difference in the score with a boosting diff of 0.2
(e.g. 1.7 and 1.9).
I know that there were many questions on the list regarding scoring
better new documents.
But I want to avoid any overhead like "FunctionQuery" at query time,
and in my case I have some documents
which have same values in many fields (=>same score) and the only
difference is the year.
However I don''t want to overboost the score so that the scoring for
other criteria is not considered.
Shortly spoken: As a result of a search I have a list of book titles and
I want a sort by score AND by year of publication.
But for performance reasons I want to avoid this sorting at query-time
by boosting at index time.
Is that possible?
thanks,
Martin');
-- Message 2: first reply (quoted thread plus the "days field" trick).
insert into emails values ('<EMAIL>','<EMAIL>',
'Re: boosting instead of sorting WAS: to boost or not to boost',to_date('FRI, 31 AUG 2007 06:28:19 GMT','DY, dd MON YYYY HH24:MI:SS "GMT"','NLS_DATE_LANGUAGE = AMERICAN')-6,
'<NAME> wrote:
> Hi Daniel,
>
>
>>> so a doc from 1973 should get a boost of 1.1973 and a doc of 1975 should
>>> get a boost of 1.1975 .
>>>
>> The boost is stored with a limited resolution. Try boosting one doc by 10,
>> the other one by 20 or something like that.
>>
>
> You''re right. I thought that with the float values the resolution should
> be good enough!
> But there is only a difference in the score with a boosting diff of 0.2
> (e.g. 1.7 and 1.9).
>
> I know that there were many questions on the list regarding scoring
> better new documents.
> But I want to avoid any overhead like "FunctionQuery" at query time,
> and in my case I have some documents
> which have same values in many fields (=>same score) and the only
> difference is the year.
>
> However I don''t want to overboost the score so that the scoring for
> other criteria is not considered.
>
> Shortly spoken: As a result of a search I have a list of book titles and
> I want a sort by score AND by year of publication.
>
> But for performance reasons I want to avoid this sorting at query-time
> by boosting at index time.
>
> Is that possible?
>
Here''s the trick that works for me, without the issues of boost
resolution or FunctionQuery.
Add a separate field, say "days", in which you will put as many "1" as
many days elapsed since the epoch (not neccessarily since 1 Jan 1970 -
pick a date that makes sense for you). Then, if you want to prioritize
newer documents, just add "+days:1" to your query. Voila - the final
results are a sum of other score factors plus a score factor that is
higher for more recent document, containing more 1-s.
If you are dealing with large time spans, you can split this into years
and days-in-a-year, and apply query boosts, like "+years:1^10.0
+days:1^0.02". Do some experiments and find what works best for you.
--
Best regards,
<NAME> <><
___. ___ ___ ___ _ _ __________________________________
[__ || __|__/|__||\/| Information Retrieval, Semantic Web
___|||__|| \| || | Embedded Unix, System Integration
http://www.sigram.com Contact: info at sigram dot com');
-- Message 3: second reply (multi-criteria sort suggestion).
insert into emails values ('<EMAIL>','<EMAIL>',
'Re: boosting instead of sorting WAS: to boost or not to boost',to_date('FRI, 31 AUG 2007 06:28:19 GMT','DY, dd MON YYYY HH24:MI:SS "GMT"','NLS_DATE_LANGUAGE = AMERICAN')-5,
'On Thursday 21 December 2006 10:55, <NAME> wrote:
> and in my case I have some documents
> which have same values in many fields (=>same score) and the only
> difference is the year.
Andrzej''s response sounds like a good solution, so just for completeness:
you can sort by more than one criterion, e.g. first by score, then by
date.
regards
Daniel
--
http://www.danielnaber.de');
-- Message 4: "lucene injection" security question.
insert into emails values ('<EMAIL>','<EMAIL>',
'lucene injection',to_date('FRI, 31 AUG 2007 06:28:19 GMT','DY, dd MON YYYY HH24:MI:SS "GMT"','NLS_DATE_LANGUAGE = AMERICAN')-4,
'I am bothered about security problems with lucene. Is it vulnerable to
any kind of injection like mysql injection? many times the query from
user is passed to lucene for search without validating.
');
-- Message 5: reply about QueryParser having no destructive behavior.
insert into emails values ('<EMAIL>','<EMAIL>',
'Re: lucene injection',to_date('FRI, 31 AUG 2007 06:28:19 GMT','DY, dd MON YYYY HH24:MI:SS "GMT"','NLS_DATE_LANGUAGE = AMERICAN')-3,
'On Dec 21, 2006, at 4:56 AM, Deepan wrote:
> I am bothered about security problems with lucene. Is it vulnerable to
> any kind of injection like mysql injection? many times the query from
> user is passed to lucene for search without validating.
Rest easy. There are no known security issues with Lucene, and it
has even undergone a recent static code analysis by Fortify (see the
lucene-dev e-mail list archives). Unlike SQL, there is no
destructive behavior available through the QueryParser.
Erik');
-- Message 6: acknowledgement reply with quoted list footer.
insert into emails values ('<EMAIL>','<EMAIL>',
'Re: lucene injection',to_date('FRI, 31 AUG 2007 06:28:19 GMT','DY, dd MON YYYY HH24:MI:SS "GMT"','NLS_DATE_LANGUAGE = AMERICAN')-2,
'On Thu, 2006-12-21 at 05:04 -0500, <NAME> wrote:
> On Dec 21, 2006, at 4:56 AM, Deepan wrote:
> > I am bothered about security problems with lucene. Is it vulnerable to
> > any kind of injection like mysql injection? many times the query from
> > user is passed to lucene for search without validating.
>
> Rest easy. There are no known security issues with Lucene, and it
> has even undergone a recent static code analysis by Fortify (see the
> lucene-dev e-mail list archives). Unlike SQL, there is no
> destructive behavior available through the QueryParser.
thanks Erik,
>
> Erik
>
>
> ---------------------------------------------------------------------
> To unsubscribe, e-mail: <EMAIL>
> For additional commands, e-mail: <EMAIL>
>');
-- Messages 7 and 8: intentionally identical bodies on consecutive days
-- (duplicate content with different dates exercises scoring/date filters).
insert into emails values ('<EMAIL>','<EMAIL>',
'Re: lucene injection',to_date('FRI, 31 AUG 2007 06:28:19 GMT','DY, dd MON YYYY HH24:MI:SS "GMT"','NLS_DATE_LANGUAGE = AMERICAN')-1,
'On Thursday 21 December 2006 10:56, Deepan wrote:
> I am bothered about security problems with lucene. Is it vulnerable to
> any kind of injection like mysql injection? many times the query from
> user is passed to lucene for search without validating.
This is only an issue if your index has permission information and you
modify the user''s query so that only parts of the index are visible to
him. For example, if you add "+permission:user" to the query the user
might add something like "OR permission:admin" to get access to more
documents. This is also why you should add new parts to the query
programmatically (BooleanQuery) to avoid the use of QueryParser.
Regards
Daniel
--
http://www.danielnaber.de');
insert into emails values ('<EMAIL>','<EMAIL>',
'Re: lucene injection',to_date('FRI, 31 AUG 2007 06:28:19 GMT','DY, dd MON YYYY HH24:MI:SS "GMT"','NLS_DATE_LANGUAGE = AMERICAN'),
'On Thursday 21 December 2006 10:56, Deepan wrote:
> I am bothered about security problems with lucene. Is it vulnerable to
> any kind of injection like mysql injection? many times the query from
> user is passed to lucene for search without validating.
This is only an issue if your index has permission information and you
modify the user''s query so that only parts of the index are visible to
him. For example, if you add "+permission:user" to the query the user
might add something like "OR permission:admin" to get access to more
documents. This is also why you should add new parts to the query
programmatically (BooleanQuery) to avoid the use of QueryParser.
Regards
Daniel
--
http://www.danielnaber.de');
|
-- Hive MetaStore schema upgrade script (MS SQL Server): 1.2.0 -> 1.2.1000.
-- Each "--:r NNN-HIVE-XXXXX.mssql.sql" marker names the upstream incremental
-- script whose statements follow it. All changes are additive (new nullable
-- columns and new tables), so the script can run against a live 1.2.0 schema.
SELECT 'Upgrading MetaStore schema from 1.2.0 to 1.2.1000' AS MESSAGE;
--:r 008-HIVE-12807.mssql.sql
ALTER TABLE COMPACTION_QUEUE ADD CQ_HIGHEST_TXN_ID bigint NULL;
--:r 009-HIVE-12814.mssql.sql
ALTER TABLE COMPACTION_QUEUE ADD CQ_META_INFO varbinary(2048) NULL;
--:r 010-HIVE-12816.mssql.sql
ALTER TABLE COMPACTION_QUEUE ADD CQ_HADOOP_JOB_ID nvarchar(32) NULL;
--:r 011-HIVE-12818.mssql.sql
-- History table for finished compactions; columns mirror COMPACTION_QUEUE.
CREATE TABLE COMPLETED_COMPACTIONS (
	CC_ID bigint NOT NULL,
	CC_DATABASE nvarchar(128) NOT NULL,
	CC_TABLE nvarchar(128) NOT NULL,
	CC_PARTITION nvarchar(767) NULL,
	CC_STATE char(1) NOT NULL,
	CC_TYPE char(1) NOT NULL,
	CC_WORKER_ID nvarchar(128) NULL,
	CC_START bigint NULL,
	CC_END bigint NULL,
	CC_RUN_AS nvarchar(128) NULL,
	CC_HIGHEST_TXN_ID bigint NULL,
	CC_META_INFO varbinary(2048) NULL,
	CC_HADOOP_JOB_ID nvarchar(128) NULL,
PRIMARY KEY CLUSTERED
(
	CC_ID ASC
)
);
--:r 012-HIVE-12819.mssql.sql
ALTER TABLE TXNS ADD TXN_AGENT_INFO nvarchar(128) NULL;
--:r 013-HIVE-12821.mssql.sql
ALTER TABLE TXNS ADD TXN_HEARTBEAT_COUNT int NULL;
ALTER TABLE HIVE_LOCKS ADD HL_HEARTBEAT_COUNT int NULL;
--:r 014-HIVE-12822.mssql.sql
ALTER TABLE TXNS ADD TXN_META_INFO nvarchar(128) NULL;
--:r 015-HIVE-12823.mssql.sql
ALTER TABLE HIVE_LOCKS ADD HL_AGENT_INFO nvarchar(128) NULL;
--:r 016-HIVE-12831.mssql.sql
ALTER TABLE HIVE_LOCKS ADD HL_BLOCKEDBY_EXT_ID bigint NULL;
ALTER TABLE HIVE_LOCKS ADD HL_BLOCKEDBY_INT_ID bigint NULL;
--:r 017-HIVE-12832.mssql.sql
-- General-purpose mutex/bookkeeping table keyed by (MT_KEY1, MT_KEY2).
CREATE TABLE AUX_TABLE (
  MT_KEY1 nvarchar(128) NOT NULL,
  MT_KEY2 bigint NOT NULL,
  MT_COMMENT nvarchar(255) NULL,
PRIMARY KEY CLUSTERED
(
    MT_KEY1 ASC,
    MT_KEY2 ASC
)
);
-- Record the new schema version (VERSION holds a single row keyed by VER_ID=1).
UPDATE VERSION SET SCHEMA_VERSION='1.2.1000', VERSION_COMMENT='Hive release version 1.2.1000' where VER_ID=1;
SELECT 'Finished upgrading MetaStore schema from 1.2.0 to 1.2.1000' AS MESSAGE;
|
-- comdb2 test script: verifies that table-level permissions granted on a time
-- partition's base table carry over to the partition, and that revoking DDL
-- blocks further ALTERs by a non-OP user.
-- NOTE(review): the '<PASSWORD>' tokens are anonymization placeholders for the
-- real test passwords; in particular the final "put password off for
-- '<PASSWORD>'" presumably targets user 'foo' -- confirm against the original.
-- create 2 users
put password '<PASSWORD>' for 'root';
put password '<PASSWORD>' for 'foo';
-- grant OP to 'root'
grant op to 'root';
set user 'root'
set password '<PASSWORD>'
-- i am an OP user
put authentication on
-- authentication enabled
select * from comdb2_users;
create table t2(i int)$$
grant read on 't2' to 'foo';
grant ddl on 't2' to 'foo';
-- create time partitions
create time partition on t2 as t2_tp period 'yearly' retention 2 start '2018-01-01';
-- give the partition rollout a moment to complete before querying it
select sleep(5);
#select 'time partitions' as rows;
#select * from comdb2_timepartitions;
select 'time partition shards' as rows;
select count(*) from comdb2_timepartshards;
select 'time partition shard permissions' as rows;
select * from comdb2_timepartpermissions;
select 'table permissions' as rows;
-- shard tables are named with a '$' prefix, hence the like '$%' filter
select username, READ, WRITE, DDL from comdb2_tablepermissions where username = 'foo' and tablename like '$%';
insert into t2_tp values(1);
-- login as non-OP user
set user foo
set password <PASSWORD>
-- the following command must succeed
alter table t2_tp { schema { int i int j null=yes } }$$
insert into t2_tp values(2, 2);
select * from t2_tp order by 1;
-- try revoking ddl permission
set user 'root'
set password '<PASSWORD>'
revoke ddl on 't2_tp' to 'foo';
select 'table permissions' as rows;
select username, READ, WRITE, DDL from comdb2_tablepermissions where username = 'foo' and tablename like '$%';
-- login as non-OP user
set user foo
set password <PASSWORD>
-- the following alter must fail now
alter table t2_tp { schema { int i int j null=yes } }$$
select * from t2_tp order by 1;
-- cleanup
set user 'root'
set password '<PASSWORD>'
put authentication off;
put password off for 'root'
put password off for '<PASSWORD>'
select * from comdb2_users;
|
-- Regression test: the os_thread_priority setting is recognized, applying it to
-- the session succeeds, and a query run afterwards still completes normally
-- (i.e. the server does not crash when query threads get a lowered priority).
SET os_thread_priority = 10;
SELECT count() FROM numbers(1000);
|
<filename>share/sql/upgrade-1.2.1-1.2.2.sql<gh_stars>1-10
SET search_path TO monitoring, public;
CREATE OR REPLACE FUNCTION metric_tables_config() RETURNS json
LANGUAGE plpgsql
AS $$
DECLARE
v_query JSON;
v_conf JSON;
q_metric_sessions_agg TEXT;
q_metric_xacts_agg TEXT;
q_metric_locks_agg TEXT;
q_metric_blocks_agg TEXT;
q_metric_bgwriter_agg TEXT;
q_metric_db_size_agg TEXT;
q_metric_tblspc_size_agg TEXT;
q_metric_filesystems_size_agg TEXT;
q_metric_temp_files_size_tblspc_agg TEXT;
q_metric_temp_files_size_db_agg TEXT;
q_metric_wal_files_agg TEXT;
q_metric_cpu_agg TEXT;
q_metric_process_agg TEXT;
q_metric_memory_agg TEXT;
q_metric_loadavg_agg TEXT;
q_metric_vacuum_analyze_agg TEXT;
BEGIN
--
-- Query template list for the actions: 'history' and 'expand'
-- 'history': Move data from metric_<type>_current to metric_<type>_history, grouping records into array of records.
-- 'expand': Return data from both metric_<type>_current and metric_<type>_history tables, depending on the time interval.
--
SELECT '{
"history": {
"host_id": "INSERT INTO #history_table# SELECT tstzrange(min(datetime), max(datetime)), host_id, array_agg(set_datetime_record(datetime, record)::#record_type#) AS records FROM #current_table# GROUP BY date_trunc(''day'', datetime),2 ORDER BY 1,2 ASC;",
"instance_id": "INSERT INTO #history_table# SELECT tstzrange(min(datetime), max(datetime)), instance_id, array_agg(set_datetime_record(datetime, record)::#record_type#) AS records FROM #current_table# GROUP BY date_trunc(''day'', datetime),2 ORDER BY 1,2 ASC;",
"dbname": "INSERT INTO #history_table# SELECT tstzrange(min(datetime), max(datetime)), instance_id, dbname, array_agg(set_datetime_record(datetime, record)::#record_type#) AS records FROM #current_table# GROUP BY date_trunc(''day'', datetime),2,3 ORDER BY 1,2 ASC;",
"spcname": "INSERT INTO #history_table# SELECT tstzrange(min(datetime), max(datetime)), instance_id, spcname, array_agg(set_datetime_record(datetime, record)::#record_type#) AS records FROM #current_table# GROUP BY date_trunc(''day'', datetime),2,3 ORDER BY 1,2,3 ASC;",
"mount_point": "INSERT INTO #history_table# SELECT tstzrange(min(datetime), max(datetime)), host_id, mount_point, array_agg(set_datetime_record(datetime, record)::#record_type#) AS records FROM #current_table# AS deleted_rows GROUP BY date_trunc(''day'', datetime),2,3 ORDER BY 1,2,3 ASC;",
"cpu": "INSERT INTO #history_table# SELECT tstzrange(min(datetime), max(datetime)), host_id, cpu, array_agg(set_datetime_record(datetime, record)::#record_type#) AS records FROM #current_table# AS deleted_rows GROUP BY date_trunc(''day'', datetime),2,3 ORDER BY 1,2,3 ASC;"
},
"expand": {
"host_id": "WITH expand AS (SELECT datetime, host_id, record FROM #current_table# WHERE #where_current# UNION SELECT (hist_query.record).datetime, host_id, hist_query.record FROM (SELECT host_id, unnest(records)::#record_type# AS record FROM #history_table# WHERE #where_history#) AS hist_query) SELECT * FROM expand WHERE datetime <@ #tstzrange# ORDER BY datetime ASC;",
"instance_id": "WITH expand AS (SELECT datetime, instance_id, record FROM #current_table# WHERE #where_current# UNION SELECT (hist_query.record).datetime, instance_id, hist_query.record FROM (SELECT instance_id, unnest(records)::#record_type# AS record FROM #history_table# WHERE #where_history#) AS hist_query) SELECT * FROM expand WHERE datetime <@ #tstzrange# ORDER BY datetime ASC;",
"dbname": "WITH expand AS (SELECT datetime, instance_id, dbname, record FROM #current_table# WHERE #where_current# UNION SELECT (hist_query.record).datetime, instance_id, dbname, hist_query.record FROM (SELECT instance_id, dbname, unnest(records)::#record_type# AS record FROM #history_table# WHERE #where_history#) AS hist_query) SELECT * FROM expand WHERE datetime <@ #tstzrange# ORDER BY datetime ASC;",
"spcname":"WITH expand AS (SELECT datetime, instance_id, spcname, record FROM #current_table# WHERE #where_current# UNION SELECT (hist_query.record).datetime, instance_id, spcname, hist_query.record FROM (SELECT instance_id, spcname, unnest(records)::#record_type# AS record FROM #history_table# WHERE #where_history#) AS hist_query) SELECT * FROM expand WHERE datetime <@ #tstzrange# ORDER BY datetime ASC;",
"mount_point": "WITH expand AS (SELECT datetime, host_id, mount_point, record FROM #current_table# WHERE #where_current# UNION SELECT (hist_query.record).datetime, host_id, mount_point, hist_query.record FROM (SELECT host_id, mount_point, unnest(records)::#record_type# AS record FROM #history_table# WHERE #where_history#) AS hist_query) SELECT * FROM expand WHERE datetime <@ #tstzrange# ORDER BY datetime ASC;",
"cpu": "WITH expand AS (SELECT datetime, host_id, cpu, record FROM #current_table# WHERE #where_current# UNION SELECT (hist_query.record).datetime, host_id, cpu, hist_query.record FROM (SELECT host_id, cpu, unnest(records)::#record_type# AS record FROM #history_table# WHERE #where_history#) AS hist_query) SELECT * FROM expand WHERE datetime <@ #tstzrange# ORDER BY datetime ASC;"
}
}'::JSON INTO v_query;
--
-- Global configuration.
--
-- For each type of metric we have to deal with, there is the following object defining some properties:
-- // Unique key used to find the configuration based on the metric name.
-- "<metric_name>": {
-- // Tables name prefix, for ease stuff it should be the same as <metric_name>
-- "name": "<metric_tbl_name>",
-- // Record composite type
-- "record_type": "<metric_record_type>",
-- // List of extra columns.
-- "columns": [
-- {
-- // Column name
-- "name": "<column_name>",
-- // Column data type
-- "data_type": "<column_data_type>"
-- },
-- [...]
-- ],
-- // Query template use to history data.
-- "history": "<query_tpl_history>",
-- // Query template use to fetch data from both _current & _history tables.
-- "expand": "<query_tpl_expand>",
-- // Query template use to aggregate data.
-- "aggregate": "<query_tpl_aggregate>"
-- }
q_metric_sessions_agg := replace(to_json($_$
INSERT INTO #agg_table#
SELECT
truncate_time(datetime, '#interval#') AS datetime,
instance_id,
dbname,
ROW(
NULL,
AVG((r).active),
AVG((r).waiting),
AVG((r).idle),
AVG((r).idle_in_xact),
AVG((r).idle_in_xact_aborted),
AVG((r).fastpath),
AVG((r).disabled),
AVG((r).no_priv)
)::#record_type#,
COUNT(*) AS w
FROM
expand_data('#name#', (SELECT tstzrange(MAX(datetime), MAX(datetime) + '1 day'::interval) FROM #agg_table#))
AS (
datetime timestamp with time zone,
instance_id integer,
dbname text,
r #record_type#
)
WHERE
truncate_time(datetime, '#interval#') < truncate_time((SELECT MAX(datetime) + '1 day'::interval FROM #agg_table#), '#interval#')
GROUP BY 1,2,3
ORDER BY 1,2,3
ON CONFLICT (datetime, instance_id, dbname)
DO UPDATE SET w = EXCLUDED.w, record = EXCLUDED.record
WHERE #agg_table#.w < EXCLUDED.w
$_$::TEXT)::TEXT, '\n', ' ');
q_metric_xacts_agg := replace(to_json($_$
INSERT INTO #agg_table#
SELECT
truncate_time(datetime, '#interval#') AS datetime,
instance_id,
dbname,
ROW(
NULL,
SUM((r).measure_interval),
SUM((r).n_commit),
SUM((r).n_rollback)
)::#record_type#,
COUNT(*) AS w
FROM
expand_data('#name#', (SELECT tstzrange(MAX(datetime), MAX(datetime) + '1 day'::interval) FROM #agg_table#))
AS (
datetime timestamp with time zone,
instance_id integer,
dbname text,
r #record_type#
)
WHERE
truncate_time(datetime, '#interval#') < truncate_time((SELECT MAX(datetime) + '1 day'::interval FROM #agg_table#), '#interval#')
GROUP BY 1,2,3
ORDER BY 1,2,3
ON CONFLICT (datetime, instance_id, dbname)
DO UPDATE SET w = EXCLUDED.w, record = EXCLUDED.record
WHERE #agg_table#.w < EXCLUDED.w
$_$::TEXT)::TEXT, '\n', ' ');
q_metric_locks_agg := replace(to_json($_$
INSERT INTO #agg_table#
SELECT
truncate_time(datetime, '#interval#') AS datetime,
instance_id,
dbname,
ROW(
NULL,
AVG((r).access_share),
AVG((r).row_share),
AVG((r).row_exclusive),
AVG((r).share_update_exclusive),
AVG((r).share),
AVG((r).share_row_exclusive),
AVG((r).exclusive),
AVG((r).access_exclusive),
AVG((r).siread),
AVG((r).waiting_access_share),
AVG((r).waiting_row_share),
AVG((r).waiting_row_exclusive),
AVG((r).waiting_share_update_exclusive),
AVG((r).waiting_share),
AVG((r).waiting_share_row_exclusive),
AVG((r).waiting_exclusive),
AVG((r).waiting_access_exclusive)
)::#record_type#,
COUNT(*) AS w
FROM
expand_data('#name#', (SELECT tstzrange(MAX(datetime), MAX(datetime) + '1 day'::interval) FROM #agg_table#))
AS (
datetime timestamp with time zone,
instance_id integer,
dbname text,
r #record_type#
)
WHERE
truncate_time(datetime, '#interval#') < truncate_time((SELECT MAX(datetime) + '1 day'::interval FROM #agg_table#), '#interval#')
GROUP BY 1,2,3
ORDER BY 1,2,3
ON CONFLICT (datetime, instance_id, dbname)
DO UPDATE SET w = EXCLUDED.w, record = EXCLUDED.record
WHERE #agg_table#.w < EXCLUDED.w
$_$::TEXT)::TEXT, '\n', ' ');
q_metric_blocks_agg := replace(to_json($_$
INSERT INTO #agg_table#
SELECT
truncate_time(datetime, '#interval#') AS datetime,
instance_id,
dbname,
ROW(
NULL,
SUM((r).measure_interval),
SUM((r).blks_read),
SUM((r).blks_hit),
AVG((r).hitmiss_ratio)
)::#record_type#,
COUNT(*) AS w
FROM
expand_data('#name#', (SELECT tstzrange(MAX(datetime), MAX(datetime) + '1 day'::interval) FROM #agg_table#))
AS (
datetime timestamp with time zone,
instance_id integer,
dbname text,
r #record_type#
)
WHERE
truncate_time(datetime, '#interval#') < truncate_time((SELECT MAX(datetime) + '1 day'::interval FROM #agg_table#), '#interval#')
GROUP BY 1,2,3
ORDER BY 1,2,3
ON CONFLICT (datetime, instance_id, dbname)
DO UPDATE SET w = EXCLUDED.w, record = EXCLUDED.record
WHERE #agg_table#.w < EXCLUDED.w
$_$::TEXT)::TEXT, '\n', ' ');
q_metric_bgwriter_agg := replace(to_json($_$
INSERT INTO #agg_table#
SELECT
truncate_time(datetime, '#interval#') AS datetime,
instance_id,
ROW(
NULL,
SUM((r).measure_interval),
SUM((r).checkpoints_timed),
SUM((r).checkpoints_req),
SUM((r).checkpoint_write_time),
SUM((r).checkpoint_sync_time),
SUM((r).buffers_checkpoint),
SUM((r).buffers_clean),
SUM((r).maxwritten_clean),
SUM((r).buffers_backend),
SUM((r).buffers_backend_fsync),
SUM((r).buffers_alloc),
NULL
)::#record_type#,
COUNT(*) AS w
FROM
expand_data('#name#', (SELECT tstzrange(MAX(datetime), MAX(datetime) + '1 day'::interval) FROM #agg_table#))
AS (
datetime timestamp with time zone,
instance_id integer,
r #record_type#
)
WHERE
truncate_time(datetime, '#interval#') < truncate_time((SELECT MAX(datetime) + '1 day'::interval FROM #agg_table#), '#interval#')
GROUP BY 1,2
ORDER BY 1,2
ON CONFLICT (datetime, instance_id)
DO UPDATE SET w = EXCLUDED.w, record = EXCLUDED.record
WHERE #agg_table#.w < EXCLUDED.w
$_$::TEXT)::TEXT, '\n', ' ');
q_metric_db_size_agg := replace(to_json($_$
INSERT INTO #agg_table#
SELECT
truncate_time(datetime, '#interval#') AS datetime,
instance_id,
dbname,
ROW(
NULL,
AVG((r).size)
)::#record_type#,
COUNT(*) AS w
FROM
expand_data('#name#', (SELECT tstzrange(MAX(datetime), MAX(datetime) + '1 day'::interval) FROM #agg_table#))
AS (
datetime timestamp with time zone,
instance_id integer,
dbname text,
r #record_type#
)
WHERE
truncate_time(datetime, '#interval#') < truncate_time((SELECT MAX(datetime) + '1 day'::interval FROM #agg_table#), '#interval#')
GROUP BY 1,2,3
ORDER BY 1,2,3
ON CONFLICT (datetime, instance_id, dbname)
DO UPDATE SET w = EXCLUDED.w, record = EXCLUDED.record
WHERE #agg_table#.w < EXCLUDED.w
$_$::TEXT)::TEXT, '\n', ' ');
q_metric_tblspc_size_agg := replace(to_json($_$
INSERT INTO #agg_table#
SELECT
truncate_time(datetime, '#interval#') AS datetime,
instance_id,
spcname,
ROW(
NULL,
AVG((r).size)
)::#record_type#,
COUNT(*) AS w
FROM
expand_data('#name#', (SELECT tstzrange(MAX(datetime), MAX(datetime) + '1 day'::interval) FROM #agg_table#))
AS (
datetime timestamp with time zone,
instance_id integer,
spcname text,
r #record_type#
)
WHERE
truncate_time(datetime, '#interval#') < truncate_time((SELECT MAX(datetime) + '1 day'::interval FROM #agg_table#), '#interval#')
GROUP BY 1,2,3
ORDER BY 1,2,3
ON CONFLICT (datetime, instance_id, spcname)
DO UPDATE SET w = EXCLUDED.w, record = EXCLUDED.record
WHERE #agg_table#.w < EXCLUDED.w
$_$::TEXT)::TEXT, '\n', ' ');
q_metric_filesystems_size_agg := replace(to_json($_$
INSERT INTO #agg_table#
SELECT
truncate_time(datetime, '#interval#') AS datetime,
host_id,
mount_point,
ROW(
NULL,
AVG((r).used),
AVG((r).total),
NULL
)::#record_type#,
COUNT(*) AS w
FROM
expand_data('#name#', (SELECT tstzrange(MAX(datetime), MAX(datetime) + '1 day'::interval) FROM #agg_table#))
AS (
datetime timestamp with time zone,
host_id integer,
mount_point text,
r #record_type#
)
WHERE
truncate_time(datetime, '#interval#') < truncate_time((SELECT MAX(datetime) + '1 day'::interval FROM #agg_table#), '#interval#')
GROUP BY 1,2,3
ORDER BY 1,2,3
ON CONFLICT (datetime, host_id, mount_point)
DO UPDATE SET w = EXCLUDED.w, record = EXCLUDED.record
WHERE #agg_table#.w < EXCLUDED.w
$_$::TEXT)::TEXT, '\n', ' ');
q_metric_temp_files_size_tblspc_agg := replace(to_json($_$
INSERT INTO #agg_table#
SELECT
truncate_time(datetime, '#interval#') AS datetime,
instance_id,
spcname,
ROW(
NULL,
AVG((r).size)
)::#record_type#,
COUNT(*) AS w
FROM
expand_data('#name#', (SELECT tstzrange(MAX(datetime), MAX(datetime) + '1 day'::interval) FROM #agg_table#))
AS (
datetime timestamp with time zone,
instance_id integer,
spcname text,
r #record_type#
)
WHERE
truncate_time(datetime, '#interval#') < truncate_time((SELECT MAX(datetime) + '1 day'::interval FROM #agg_table#), '#interval#')
GROUP BY 1,2,3
ORDER BY 1,2,3
ON CONFLICT (datetime, instance_id, spcname)
DO UPDATE SET w = EXCLUDED.w, record = EXCLUDED.record
WHERE #agg_table#.w < EXCLUDED.w
$_$::TEXT)::TEXT, '\n', ' ');
q_metric_temp_files_size_db_agg := replace(to_json($_$
INSERT INTO #agg_table#
SELECT
truncate_time(datetime, '#interval#') AS datetime,
instance_id,
dbname,
ROW(
NULL,
AVG((r).size)
)::#record_type#,
COUNT(*) AS w
FROM
expand_data('#name#', (SELECT tstzrange(MAX(datetime), MAX(datetime) + '1 day'::interval) FROM #agg_table#))
AS (
datetime timestamp with time zone,
instance_id integer,
dbname text,
r #record_type#
)
WHERE
truncate_time(datetime, '#interval#') < truncate_time((SELECT MAX(datetime) + '1 day'::interval FROM #agg_table#), '#interval#')
GROUP BY 1,2,3
ORDER BY 1,2,3
ON CONFLICT (datetime, instance_id, dbname)
DO UPDATE SET w = EXCLUDED.w, record = EXCLUDED.record
WHERE #agg_table#.w < EXCLUDED.w
$_$::TEXT)::TEXT, '\n', ' ');
q_metric_wal_files_agg := replace(to_json($_$
INSERT INTO #agg_table#
SELECT
truncate_time(datetime, '#interval#') AS datetime,
instance_id,
ROW(
NULL,
SUM((r).measure_interval),
MAX((r).written_size),
MIN((r).current_location::pg_lsn)::TEXT,
MAX((r).total),
MAX((r).archive_ready),
MAX((r).total_size)
)::#record_type#,
COUNT(*) AS w
FROM
expand_data('#name#', (SELECT tstzrange(MAX(datetime), MAX(datetime) + '1 day'::interval) FROM #agg_table#))
AS (
datetime timestamp with time zone,
instance_id integer,
r #record_type#
)
WHERE
truncate_time(datetime, '#interval#') < truncate_time((SELECT MAX(datetime) + '1 day'::interval FROM #agg_table#), '#interval#')
GROUP BY 1,2
ORDER BY 1,2
ON CONFLICT (datetime, instance_id)
DO UPDATE SET w = EXCLUDED.w, record = EXCLUDED.record
WHERE #agg_table#.w < EXCLUDED.w
$_$::TEXT)::TEXT, '\n', ' ');
q_metric_cpu_agg := replace(to_json($_$
INSERT INTO #agg_table#
SELECT
truncate_time(datetime, '#interval#') AS datetime,
host_id,
cpu,
ROW(
NULL,
SUM((r).measure_interval),
SUM((r).time_user),
SUM((r).time_system),
SUM((r).time_idle),
SUM((r).time_iowait),
SUM((r).time_steal)
)::#record_type#,
COUNT(*) AS w
FROM
expand_data('#name#', (SELECT tstzrange(MAX(datetime), MAX(datetime) + '1 day'::interval) FROM #agg_table#))
AS (
datetime timestamp with time zone,
host_id integer,
cpu text,
r #record_type#
)
WHERE
truncate_time(datetime, '#interval#') < truncate_time((SELECT MAX(datetime) + '1 day'::interval FROM #agg_table#), '#interval#')
GROUP BY 1,2,3
ORDER BY 1,2,3
ON CONFLICT (datetime, host_id, cpu)
DO UPDATE SET w = EXCLUDED.w, record = EXCLUDED.record
WHERE #agg_table#.w < EXCLUDED.w
$_$::TEXT)::TEXT, '\n', ' ');
q_metric_process_agg := replace(to_json($_$
INSERT INTO #agg_table#
SELECT
truncate_time(datetime, '#interval#') AS datetime,
host_id,
ROW(
NULL,
SUM((r).measure_interval),
SUM((r).context_switches),
SUM((r).forks),
AVG((r).procs_running),
AVG((r).procs_blocked),
AVG((r).procs_total)
)::#record_type#,
COUNT(*) AS w
FROM
expand_data('#name#', (SELECT tstzrange(MAX(datetime), MAX(datetime) + '1 day'::interval) FROM #agg_table#))
AS (
datetime timestamp with time zone,
host_id integer,
r #record_type#
)
WHERE
truncate_time(datetime, '#interval#') < truncate_time((SELECT MAX(datetime) + '1 day'::interval FROM #agg_table#), '#interval#')
GROUP BY 1,2
ORDER BY 1,2
ON CONFLICT (datetime, host_id)
DO UPDATE SET w = EXCLUDED.w, record = EXCLUDED.record
WHERE #agg_table#.w < EXCLUDED.w
$_$::TEXT)::TEXT, '\n', ' ');
q_metric_memory_agg := replace(to_json($_$
INSERT INTO #agg_table#
SELECT
truncate_time(datetime, '#interval#') AS datetime,
host_id,
ROW(
NULL,
AVG((r).mem_total),
AVG((r).mem_used),
AVG((r).mem_free),
AVG((r).mem_buffers),
AVG((r).mem_cached),
AVG((r).swap_total),
AVG((r).swap_used)
)::#record_type#,
COUNT(*) AS w
FROM
expand_data('#name#', (SELECT tstzrange(MAX(datetime), MAX(datetime) + '1 day'::interval) FROM #agg_table#))
AS (
datetime timestamp with time zone,
host_id integer,
r #record_type#
)
WHERE
truncate_time(datetime, '#interval#') < truncate_time((SELECT MAX(datetime) + '1 day'::interval FROM #agg_table#), '#interval#')
GROUP BY 1,2
ORDER BY 1,2
ON CONFLICT (datetime, host_id)
DO UPDATE SET w = EXCLUDED.w, record = EXCLUDED.record
WHERE #agg_table#.w < EXCLUDED.w
$_$::TEXT)::TEXT, '\n', ' ');
q_metric_loadavg_agg := replace(to_json($_$
INSERT INTO #agg_table#
SELECT
truncate_time(datetime, '#interval#') AS datetime,
host_id,
ROW(
NULL,
ROUND(AVG((r).load1)::NUMERIC, 2),
ROUND(AVG((r).load5)::NUMERIC, 2),
ROUND(AVG((r).load15)::NUMERIC, 2)
)::#record_type#,
COUNT(*) AS w
FROM
expand_data('#name#', (SELECT tstzrange(MAX(datetime), MAX(datetime) + '1 day'::interval) FROM #agg_table#))
AS (
datetime timestamp with time zone,
host_id integer,
r #record_type#
)
WHERE
truncate_time(datetime, '#interval#') < truncate_time((SELECT MAX(datetime) + '1 day'::interval FROM #agg_table#), '#interval#')
GROUP BY 1,2
ORDER BY 1,2
ON CONFLICT (datetime, host_id)
DO UPDATE SET w = EXCLUDED.w, record = EXCLUDED.record
WHERE #agg_table#.w < EXCLUDED.w
$_$::TEXT)::TEXT, '\n', ' ');
q_metric_vacuum_analyze_agg := replace(to_json($_$
INSERT INTO #agg_table#
SELECT
truncate_time(datetime, '#interval#') AS datetime,
instance_id,
dbname,
ROW(
NULL,
SUM((r).measure_interval),
SUM((r).n_vacuum),
SUM((r).n_analyze),
SUM((r).n_autovacuum),
SUM((r).n_autoanalyze)
)::#record_type#,
COUNT(*) AS w
FROM
expand_data('#name#', (SELECT tstzrange(MAX(datetime), MAX(datetime) + '1 day'::interval) FROM #agg_table#))
AS (
datetime timestamp with time zone,
instance_id integer,
dbname text,
r #record_type#
)
WHERE
truncate_time(datetime, '#interval#') < truncate_time((SELECT MAX(datetime) + '1 day'::interval FROM #agg_table#), '#interval#')
GROUP BY 1,2,3
ORDER BY 1,2,3
ON CONFLICT (datetime, instance_id, dbname)
DO UPDATE SET w = EXCLUDED.w, record = EXCLUDED.record
WHERE #agg_table#.w < EXCLUDED.w
$_$::TEXT)::TEXT, '\n', ' ');
SELECT ('{
"metric_sessions": {
"name": "metric_sessions",
"record_type": "metric_sessions_record",
"columns":
[
{"name": "instance_id", "data_type": "INTEGER NOT NULL REFERENCES instances (instance_id)"},
{"name": "dbname", "data_type": "TEXT NOT NULL"}
],
"history": "'||(v_query->'history'->>'dbname')||'",
"expand": "'||(v_query->'expand'->>'dbname')||'",
"aggregate": '||q_metric_sessions_agg||'
},
"metric_xacts": {
"name": "metric_xacts",
"record_type": "metric_xacts_record",
"columns":
[
{"name": "instance_id", "data_type": "INTEGER NOT NULL REFERENCES instances (instance_id)"},
{"name": "dbname", "data_type": "TEXT NOT NULL"}
],
"history": "'||(v_query->'history'->>'dbname')||'",
"expand": "'||(v_query->'expand'->>'dbname')||'",
"aggregate": '||q_metric_xacts_agg||'
},
"metric_locks": {
"name": "metric_locks",
"record_type": "metric_locks_record",
"columns":
[
{"name": "instance_id", "data_type": "INTEGER NOT NULL REFERENCES instances (instance_id)"},
{"name": "dbname", "data_type": "TEXT NOT NULL"}
],
"history": "'||(v_query->'history'->>'dbname')||'",
"expand": "'||(v_query->'expand'->>'dbname')||'",
"aggregate": '||q_metric_locks_agg||'
},
"metric_blocks": {
"name": "metric_blocks",
"record_type": "metric_blocks_record",
"columns":
[
{"name": "instance_id", "data_type": "INTEGER NOT NULL REFERENCES instances (instance_id)"},
{"name": "dbname", "data_type": "TEXT NOT NULL"}
],
"history": "'||(v_query->'history'->>'dbname')||'",
"expand": "'||(v_query->'expand'->>'dbname')||'",
"aggregate": '||q_metric_blocks_agg||'
},
"metric_bgwriter": {
"name": "metric_bgwriter",
"record_type": "metric_bgwriter_record",
"columns":
[
{"name": "instance_id", "data_type": "INTEGER NOT NULL REFERENCES instances (instance_id)"}
],
"history": "'||(v_query->'history'->>'instance_id')||'",
"expand": "'||(v_query->'expand'->>'instance_id')||'",
"aggregate": '||q_metric_bgwriter_agg||'
},
"metric_db_size": {
"name": "metric_db_size",
"record_type": "metric_db_size_record",
"columns":
[
{"name": "instance_id", "data_type": "INTEGER NOT NULL REFERENCES instances (instance_id)"},
{"name": "dbname", "data_type": "TEXT NOT NULL"}
],
"history": "'||(v_query->'history'->>'dbname')||'",
"expand": "'||(v_query->'expand'->>'dbname')||'",
"aggregate": '||q_metric_db_size_agg||'
},
"metric_tblspc_size": {
"name": "metric_tblspc_size",
"record_type": "metric_tblspc_size_record",
"columns":
[
{"name": "instance_id", "data_type": "INTEGER NOT NULL REFERENCES instances (instance_id)"},
{"name": "spcname", "data_type": "TEXT NOT NULL"}
],
"history": "'||(v_query->'history'->>'spcname')||'",
"expand": "'||(v_query->'expand'->>'spcname')||'",
"aggregate": '||q_metric_tblspc_size_agg||'
},
"metric_filesystems_size": {
"name": "metric_filesystems_size",
"record_type": "metric_filesystems_size_record",
"columns":
[
{"name": "host_id", "data_type": "INTEGER NOT NULL REFERENCES hosts (host_id)"},
{"name": "mount_point", "data_type": "TEXT NOT NULL"}
],
"history": "'||(v_query->'history'->>'mount_point')||'",
"expand": "'||(v_query->'expand'->>'mount_point')||'",
"aggregate": '||q_metric_filesystems_size_agg||'
},
"metric_temp_files_size_tblspc": {
"name": "metric_temp_files_size_tblspc",
"record_type": "metric_temp_files_size_tblspc_record",
"columns":
[
{"name": "instance_id", "data_type": "INTEGER NOT NULL REFERENCES instances (instance_id)"},
{"name": "spcname", "data_type": "TEXT NOT NULL"}
],
"history": "'||(v_query->'history'->>'spcname')||'",
"expand": "'||(v_query->'expand'->>'spcname')||'",
"aggregate": '||q_metric_temp_files_size_tblspc_agg||'
},
"metric_temp_files_size_db": {
"name": "metric_temp_files_size_db",
"record_type": "metric_temp_files_size_db_record",
"columns":
[
{"name": "instance_id", "data_type": "INTEGER NOT NULL REFERENCES instances (instance_id)"},
{"name": "dbname", "data_type": "TEXT NOT NULL"}
],
"history": "'||(v_query->'history'->>'dbname')||'",
"expand": "'||(v_query->'expand'->>'dbname')||'",
"aggregate": '||q_metric_temp_files_size_db_agg||'
},
"metric_wal_files": {
"name": "metric_wal_files",
"record_type": "metric_wal_files_record",
"columns":
[
{"name": "instance_id", "data_type": "INTEGER NOT NULL REFERENCES instances (instance_id)"}
],
"history": "'||(v_query->'history'->>'instance_id')||'",
"expand": "'||(v_query->'expand'->>'instance_id')||'",
"aggregate": '||q_metric_wal_files_agg||'
},
"metric_cpu": {
"name": "metric_cpu",
"record_type": "metric_cpu_record",
"columns":
[
{"name": "host_id", "data_type": "INTEGER NOT NULL REFERENCES hosts (host_id)"},
{"name": "cpu", "data_type": "TEXT NOT NULL"}
],
"history": "'||(v_query->'history'->>'cpu')||'",
"expand": "'||(v_query->'expand'->>'cpu')||'",
"aggregate": '||q_metric_cpu_agg||'
},
"metric_process": {
"name": "metric_process",
"record_type": "metric_process_record",
"columns":
[
{"name": "host_id", "data_type": "INTEGER NOT NULL REFERENCES hosts (host_id)"}
],
"history": "'||(v_query->'history'->>'host_id')||'",
"expand": "'||(v_query->'expand'->>'host_id')||'",
"aggregate": '||q_metric_process_agg||'
},
"metric_memory": {
"name": "metric_memory",
"record_type": "metric_memory_record",
"columns":
[
{"name": "host_id", "data_type": "INTEGER NOT NULL REFERENCES hosts (host_id)"}
],
"history": "'||(v_query->'history'->>'host_id')||'",
"expand": "'||(v_query->'expand'->>'host_id')||'",
"aggregate": '||q_metric_memory_agg||'
},
"metric_loadavg": {
"name": "metric_loadavg",
"record_type": "metric_loadavg_record",
"columns":
[
{"name": "host_id", "data_type": "INTEGER NOT NULL REFERENCES hosts (host_id)"}
],
"history": "'||(v_query->'history'->>'host_id')||'",
"expand": "'||(v_query->'expand'->>'host_id')||'",
"aggregate": '||q_metric_loadavg_agg||'
},
"metric_vacuum_analyze": {
"name": "metric_vacuum_analyze",
"record_type": "metric_vacuum_analyze_record",
"columns":
[
{"name": "instance_id", "data_type": "INTEGER NOT NULL REFERENCES instances (instance_id)"},
{"name": "dbname", "data_type": "TEXT NOT NULL"}
],
"history": "'||(v_query->'history'->>'dbname')||'",
"expand": "'||(v_query->'expand'->>'dbname')||'",
"aggregate": '||q_metric_vacuum_analyze_agg||'
}}')::JSON INTO v_conf;
RETURN v_conf;
END;
$$;
|
<reponame>glamod/glamod_database<gh_stars>0
-- Lookup table mapping a numeric time-quality flag to its
-- human-readable description.
CREATE TABLE __INSERT_SCHEMA__.time_quality (
    quality INT PRIMARY KEY,  -- quality flag code (inline PK; implies NOT NULL)
    description VARCHAR       -- free-text meaning of the flag
);
|
<gh_stars>0
--
-- Transaction table.
--
-- Parent record for every money movement; child tables link to it
-- one-to-one through the business key txn_id.
CREATE TABLE transaction_table
(
    id BIGINT GENERATED BY DEFAULT AS IDENTITY NOT NULL PRIMARY KEY,
    uid VARCHAR(40) NOT NULL,          -- owning user identifier
    txn_id VARCHAR(40) NOT NULL UNIQUE, -- business transaction key
    txn_type VARCHAR(16) NOT NULL,
    txn_status VARCHAR(16) NOT NULL
);
-- Wallet / money-reload transactions.
CREATE TABLE money_reload_txn_table
(
    id BIGINT GENERATED BY DEFAULT AS IDENTITY NOT NULL PRIMARY KEY,
    txn_id VARCHAR(40) NOT NULL UNIQUE,
    -- NUMERIC for exact money arithmetic; FLOAT silently loses cents.
    amount NUMERIC(12, 2) NOT NULL,
    CONSTRAINT fk_money_reload_txn_table_txn_id
        FOREIGN KEY (txn_id)
            REFERENCES transaction_table (txn_id)
);
-- Airtime top-ups sent to a phone number.
CREATE TABLE airtime_send_txn_table
(
    id BIGINT GENERATED BY DEFAULT AS IDENTITY NOT NULL PRIMARY KEY,
    txn_id VARCHAR(40) NOT NULL UNIQUE,
    amount NUMERIC(12, 2) NOT NULL,    -- exact decimal, not FLOAT (money)
    phone_number VARCHAR(16) NOT NULL,
    CONSTRAINT fk_airtime_send_txn_table_txn_id
        FOREIGN KEY (txn_id)
            REFERENCES transaction_table (txn_id)
);
<gh_stars>0
-- Creating default seed users for development/testing.
-- NOTE(review): passwords here look like plain text — confirm the
-- application hashes them before real use; storing plaintext is unsafe.
INSERT INTO userDetails (userName, name, password) VALUES ('Harry12', '<NAME>', 'password'),
('Dylannnn', '<NAME>','password556'),
('Marky', '<NAME>','password'),
('Jonny', '<NAME>','password556'),
('Murph', '<NAME>','password115'),
('Dan', '<NAME>','password55'),
('Soph', '<NAME>','password<PASSWORD>'),
('Albahe', '<NAME>','password556'),
('Bert', '<NAME>','password'),
('Emmmmmma', '<NAME>','password556'),
('Martinahhh', '<NAME>','password6'),
('MikeRoss', '<NAME>','password65'),
('DarthVader', '<NAME>','password<PASSWORD>'),
('PickleRick', '<NAME>','password<PASSWORD>'),
('Windows', '<NAME>','password8'),
('BillyTheKid', '<NAME>','password55'),
('Cooney', '<NAME>','password56'),
('Ger', '<NAME>','password'),
('Mags', '<NAME>','password556'),
('Tommy', '<NAME>','password556');
|
-- phpMyAdmin SQL Dump
-- version 4.7.3
-- https://www.phpmyadmin.net/
--
-- Host: localhost:3306
-- Generation Time: Feb 10, 2018 21:31
-- Server version: 10.1.24-MariaDB-cll-lve
-- PHP Version: 5.6.30
-- Dump preamble: disable auto-increment-on-zero, run the whole dump in
-- one transaction, and pin the session time zone to UTC.
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";
-- /*!40101 ... */ are MySQL conditional comments: executed only on
-- server version >= 4.01.01. They save the client charset/collation
-- and restore it after the dump body.
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `udnzcdwq_cosplay_mixhost`
--
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
|
<gh_stars>0
-- Rate/usage restriction configuration.
-- NOTE(review): column names are cryptic — presumably `n` is the
-- allowed count, `d` the period granularity, `g` the scope, and `tz`
-- the evaluation time zone; confirm against the consuming code.
CREATE TABLE `restriction_setting` (
`id` integer,
`n` integer,
`d` enum('day', 'week', 'month') default 'day',
`g` enum('individual', 'group') default 'group',
`tz` char(255) default 'UTC',
PRIMARY KEY (`id`)
) AUTO_INCREMENT=1;
-- Default: 2 per day, applied per individual, Asia/Kolkata time zone.
INSERT INTO `restriction_setting` (`id`,`n`,`d`, `g`, `tz`) VALUES (1,2,'day','individual','Asia/Kolkata');
|
<gh_stars>0
-- Session settings for a clean, deterministic schema load.
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'SQL_ASCII';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET client_min_messages = warning;
SET row_security = off;
SET default_tablespace = '';
SET default_with_oids = false;
-- Consent-management data model. The `auth` role is the application
-- user; `postgres` owns the objects.
CREATE SCHEMA consent;
GRANT ALL ON SCHEMA consent to postgres;
GRANT USAGE ON SCHEMA consent TO auth;
-- Enumerated domains shared across the schema.
CREATE TYPE consent.status_enum AS ENUM ('rejected', 'pending', 'approved');
CREATE TYPE consent.role_enum AS ENUM ('consumer', 'data ingester', 'onboarder', 'delegate', 'provider', 'admin');
CREATE TYPE consent.access_item AS ENUM ('resourcegroup', 'catalogue', 'delegate');
CREATE TYPE consent.capability_enum AS ENUM ('temporal', 'complex', 'subscription');
-- Organizations that users belong to.
CREATE TABLE consent.organizations (
    id integer GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
    name character varying NOT NULL,
    website character varying NOT NULL,
    city character varying NOT NULL,
    state character varying(2) NOT NULL,
    country character varying(2) NOT NULL,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
-- The PRIMARY KEY already creates a unique index on id; the former
-- idx_organizations_id duplicate has been removed (pure write overhead).
CREATE UNIQUE INDEX idx_organizations_website ON consent.organizations(website);
-- Registered users; organization membership is optional.
CREATE TABLE consent.users (
    id integer GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
    title character varying NOT NULL,
    first_name character varying NOT NULL,
    last_name character varying NOT NULL,
    email character varying NOT NULL,
    phone character varying(10) NOT NULL,
    organization_id integer REFERENCES consent.organizations(id) ,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
-- idx_users_id removed: redundant with the PRIMARY KEY's unique index.
CREATE UNIQUE INDEX idx_users_email ON consent.users(email);
-- Role assignments per user; rows disappear with their user.
CREATE TABLE consent.role (
    id integer GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
    user_id integer NOT NULL REFERENCES consent.users(id) ON DELETE CASCADE,
    role consent.role_enum NOT NULL,
    status consent.status_enum NOT NULL,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
-- idx_role_id removed: redundant with the PRIMARY KEY's unique index.
-- Certificate signing requests and issued certificates per user.
CREATE TABLE consent.certificates (
    id integer GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
    user_id integer NOT NULL REFERENCES consent.users(id) ON DELETE CASCADE,
    csr character varying NOT NULL,
    cert character varying ,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
-- Access policies granted by a provider, optionally tied to a role and
-- to a specific item (see consent.access_item for the item kinds).
CREATE TABLE consent.access (
    id integer GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
    provider_id integer NOT NULL REFERENCES consent.users(id) ON DELETE CASCADE,
    role_id integer REFERENCES consent.role(id) ON DELETE CASCADE,
    policy_text character varying NOT NULL,
    access_item_id integer ,
    access_item_type consent.access_item ,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
-- idx_access_id removed: redundant with the PRIMARY KEY's unique index.
-- Resource groups registered by a provider, keyed by catalogue id.
CREATE TABLE consent.resourcegroup (
    id integer GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
    provider_id integer NOT NULL REFERENCES consent.users(id) ON DELETE CASCADE,
    cat_id character varying NOT NULL,
    created_at timestamp without time zone NOT NULL,
    updated_at timestamp without time zone NOT NULL
);
-- Capabilities attached to an access grant; one row per capability.
CREATE TABLE consent.capability (
    id integer GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
    access_id integer NOT NULL REFERENCES consent.access(id) ON DELETE CASCADE,
    capability consent.capability_enum NOT NULL,
    UNIQUE (access_id, capability)
);
ALTER TABLE consent.organizations OWNER TO postgres;
ALTER TABLE consent.users OWNER TO postgres;
ALTER TABLE consent.role OWNER TO postgres;
ALTER TABLE consent.certificates OWNER TO postgres;
ALTER TABLE consent.access OWNER TO postgres;
ALTER TABLE consent.resourcegroup OWNER TO postgres;
ALTER TABLE consent.capability OWNER TO postgres;
-- Application-role grants (note: organizations/certificates get no
-- DELETE on purpose).
GRANT SELECT,INSERT,UPDATE ON TABLE consent.organizations TO auth;
GRANT SELECT,INSERT,UPDATE,DELETE ON TABLE consent.users TO auth;
GRANT SELECT,INSERT,UPDATE ON TABLE consent.certificates TO auth;
GRANT SELECT,INSERT,UPDATE,DELETE ON TABLE consent.role TO auth;
GRANT SELECT,INSERT,UPDATE,DELETE ON TABLE consent.resourcegroup TO auth;
GRANT SELECT,INSERT,UPDATE,DELETE ON TABLE consent.access TO auth;
GRANT SELECT,INSERT,UPDATE,DELETE ON TABLE consent.capability TO auth;
|
-- Report shipping availability for product 1.
-- A weight of 1 or more means the item is too heavy to ship (pickup only).
-- Consolidated the original IF/ELSE pair into one CASE query: a single
-- table access, no duplicated SELECT list, identical results — when
-- Weight is NULL or the row is absent, both forms fall through to the
-- ELSE branch / return zero rows.
SELECT ProductKey, EnglishDescription, Weight,
       CASE
           WHEN Weight >= 1
               THEN 'This product is too heavy to ship and is only available for pickup.'
           ELSE 'This product is available for shipping or pickup.'
       END AS ShippingStatus
FROM DimProduct WHERE ProductKey = 1
|
USE vulcan_db;
-- Seed users.
-- String literals converted to single quotes: double quotes denote
-- identifiers under MySQL's ANSI_QUOTES sql_mode and are non-portable
-- for strings.
-- NOTE(review): passwords appear to be plain text — confirm hashing
-- happens at the application layer.
INSERT INTO Users
(id, firstName, lastName, email, password, createdAt, updatedAt)
VALUES
('2', 'Bob', 'Bobby', '<EMAIL>', '123123123', '2021-03-23 03:34:19', '2021-03-23 03:34:19'),
('3', 'Rob', 'Robby', '<EMAIL>', '123123123', '2021-03-23 03:34:19', '2021-03-23 03:34:19'),
('4', 'Lisa', 'Lizzy', '<EMAIL>', '123123123', '2021-03-23 03:34:19', '2021-03-23 03:34:19'),
('5', 'Tom', 'Tommy', '<EMAIL>', '<PASSWORD>', '2021-03-23 03:34:19', '2021-03-23 03:34:19'),
('6', 'Bill', 'Billy', '<EMAIL>', '123123123', '2021-03-23 03:34:19', '2021-03-23 03:34:19'),
('7', 'Pete', 'Petey', '<EMAIL>', '123123123', '2021-03-23 03:34:19', '2021-03-23 03:34:19'),
('8', 'Jill', 'Jilly', '<EMAIL>', '123123123', '2021-03-23 03:34:19', '2021-03-23 03:34:19'),
('9', 'Tim', 'Timmy', '<EMAIL>', '123123123', '2021-03-23 03:34:19', '2021-03-23 03:34:19'),
('10', 'Steve', 'Stevey', '<EMAIL>', '123123123', '2021-03-23 03:34:19', '2021-03-23 03:34:19');
-- Seed service providers; UserId links each provider to a Users row above.
INSERT INTO servicePros
(servicePro_companyName, servicePro_url, servicePro_phone, servicePro_category, servicePro_address, servicePro_city, servicePro_state, servicePro_zipCode, createdAt, updatedAt, UserId)
VALUES
('Bobs Plumbing', 'www.bobsplumbing.com', '123-123-1234', 'Plumber', '3310 Walnut Ln', 'Random City', 'CA', '92101', '2021-03-23 03:34:19', '2021-03-23 03:34:19', '2'),
('Johns Electrical', 'www.johnselectrical.com', '123-123-1234', 'Electrican', '807 Elm St', 'Random City2', 'CA', '92019', '2021-03-23 03:34:19', '2021-03-23 03:34:19', '3'),
('Sarahs HVAC', 'www.sarahhvac.com', '123-123-1234', 'HVAC', '908 Main St', 'Random City3', 'CA', '92015', '2021-03-23 03:34:19', '2021-03-23 03:34:19', '4'),
('Diego Refrigeration', 'www.diegofrig.com', '123-123-1234', 'Refrigeration', '398 Ocean Blvd', 'Random City4', 'CA', '92101', '2021-03-23 03:34:19', '2021-03-23 03:34:19', '5'),
('Kumals Cleaning', 'www.kumalscleaning.com', '123-123-1234', 'General Cleaning', '668 Juniper St', 'Random City5', 'CA', '92103', '2021-03-23 03:34:19', '2021-03-23 03:34:19', '6'),
('<NAME>', 'www.meganslinens.com', '123-123-1234', 'Linens', '379 Lakeview Ave', 'Random City6', 'CA', '91978', '2021-03-23 03:34:19', '2021-03-23 03:34:19', '7'),
('Amandas Pest Control', 'www.amandaspestcontrol.com', '123-123-1234', 'Pest Control', '756 Iris Dr', 'Random City7', 'CA', '92154', '2021-03-23 03:34:19', '2021-03-23 03:34:19', '8'),
('Toms Window Repair', 'www.tomswindowrepair.com', '123-123-1234', 'Window Repair', '899 Date Ave', 'Random City8', 'CA', '92023', '2021-03-23 03:34:19', '2021-03-23 03:34:19', '9'),
('Bills Apparel', 'www.billsapparel.com', '123-123-1234', 'Apparel', '472 Harbor Dr', 'Random City9', 'CA', '92106', '2021-03-23 03:34:19', '2021-03-23 03:34:19', '10');
|
-- Simple lookup table of tag labels.
CREATE TABLE tags (
    id SERIAL PRIMARY KEY,
    label VARCHAR NOT NULL
);
-- Fix: the statement was previously unterminated (missing semicolon),
-- which breaks multi-statement execution of this file.
|
-- This document is the master SQL file for creating and
-- initializing tables.
DROP DATABASE IF EXISTS testGM;
CREATE DATABASE testGM;
USE testGM;
-- -----------------------------------------------------
-- Save the current session flags, then disable unique/FK checks and
-- relax the SQL mode so tables can be (re)created in any order.
-- The original values are restored at the end of the file.
SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0;
SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0;
SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='TRADITIONAL,ALLOW_INVALID_DATES';
-- -----------------------------------------------------
-- -----------------------------------------------------
-- Table `user`
-- Approved application accounts.
-- NOTE(review): password is VARCHAR(32) — looks sized for an MD5 hex
-- digest; confirm hashing, since plaintext storage would be unsafe.
-- -----------------------------------------------------
DROP TABLE IF EXISTS `user` ;
CREATE TABLE IF NOT EXISTS `user` (
  `id` INT NOT NULL AUTO_INCREMENT,
  `email` VARCHAR(512) NOT NULL,
  `password` VARCHAR(32) NOT NULL,
  `fname` VARCHAR(128) NULL,
  `lname` VARCHAR(128) NULL,
  `create_date` DATETIME NOT NULL,
  `is_admin` TINYINT(1) NOT NULL,
  PRIMARY KEY (`id`))
ENGINE = InnoDB;
-- -----------------------------------------------------
-- Table `pending_user`
-- Signup requests awaiting approval; carries the applicant's stated
-- position and reason in addition to the `user` columns.
-- -----------------------------------------------------
DROP TABLE IF EXISTS `pending_user` ;
CREATE TABLE IF NOT EXISTS `pending_user` (
  `id` INT NOT NULL AUTO_INCREMENT,
  `email` VARCHAR(512) NOT NULL,
  `password` VARCHAR(32) NOT NULL,
  `fname` VARCHAR(128) NULL,
  `lname` VARCHAR(128) NULL,
  `create_date` DATETIME NOT NULL,
  `position` TEXT NOT NULL,
  `reason` TEXT NOT NULL,
  PRIMARY KEY (`id`))
ENGINE = InnoDB;
-- -----------------------------------------------------
-- Table `explicit_content`
-- Word list used to filter profanity; seeded further below.
-- -----------------------------------------------------
DROP TABLE IF EXISTS `explicit_content` ;
CREATE TABLE IF NOT EXISTS `explicit_content` (
  `id` INT NOT NULL AUTO_INCREMENT,
  `explicit_words` VARCHAR(128) NOT NULL UNIQUE,
  PRIMARY KEY (`id`))
ENGINE = InnoDB;
-- -----------------------------------------------------
-- Table `common_content`
-- Stop-word list (common English words); seeded further below.
-- -----------------------------------------------------
DROP TABLE IF EXISTS `common_content` ;
CREATE TABLE IF NOT EXISTS `common_content` (
  `id` INT NOT NULL AUTO_INCREMENT,
  `common_words` VARCHAR(128) NOT NULL UNIQUE,
  PRIMARY KEY (`id`))
ENGINE = InnoDB;
-- -----------------------------------------------------
-- Table `makes`
-- Vehicle manufacturers (Chevrolet, Buick, ...).
-- NOTE(review): models.make_id, model_years.model_id and the two
-- *_alternates tables carry reference columns but declare no FOREIGN
-- KEY constraints — confirm whether integrity is enforced in the app.
-- -----------------------------------------------------
DROP TABLE IF EXISTS makes;
CREATE TABLE makes(
make_id INT NOT NULL AUTO_INCREMENT,
make_name varchar(128) NOT NULL UNIQUE,
PRIMARY KEY(make_id))
ENGINE = InnoDB;
-- -----------------------------------------------------
-- Table `models`
-- Vehicle models, one row per model, linked to a make via make_id.
-- -----------------------------------------------------
DROP TABLE IF EXISTS models;
CREATE TABLE models(
model_id INT NOT NULL AUTO_INCREMENT,
make_id INT NOT NULL,
model_name varchar(128) NOT NULL UNIQUE,
PRIMARY KEY(model_id))
ENGINE = InnoDB;
-- -----------------------------------------------------
-- Table `model_years`
-- Model-year availability, linked to a model via model_id.
-- -----------------------------------------------------
DROP TABLE IF EXISTS model_years;
CREATE TABLE model_years(
year_id INT NOT NULL AUTO_INCREMENT,
model_id INT NOT NULL,
year_name varchar(128) NOT NULL,
PRIMARY KEY(year_id))
ENGINE = InnoDB;
-- -----------------------------------------------------
-- Table `make_alternates`
-- Alternate / colloquial names for makes (e.g. "Chevy").
-- -----------------------------------------------------
DROP TABLE IF EXISTS make_alternates;
CREATE TABLE make_alternates(
make_alternate_id INT NOT NULL AUTO_INCREMENT,
make_id INT NOT NULL,
make_alternate_name varchar(128) NOT NULL,
PRIMARY KEY(make_alternate_id))
ENGINE = InnoDB;
-- -----------------------------------------------------
-- Table `model_alternates`
-- Alternate / colloquial names for models (e.g. "Vette").
-- -----------------------------------------------------
DROP TABLE IF EXISTS model_alternates;
CREATE TABLE model_alternates(
model_alternate_id INT NOT NULL AUTO_INCREMENT,
model_id INT NOT NULL,
model_alternate_name varchar(128) NOT NULL,
PRIMARY KEY(model_alternate_id))
ENGINE = InnoDB;
-- -----------------------------------------------------
-- Restore the session flags saved at the top of the file.
SET SQL_MODE=@OLD_SQL_MODE;
SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS;
SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS;
-- -----------------------------------------------------
-- -----------------------------------------------------
-- Initial data
-- -----------------------------------------------------
-- Seed data, loaded atomically. Multi-row inserts below preserve the
-- exact row order of the original per-statement version, so all
-- AUTO_INCREMENT ids come out identical.
START TRANSACTION;
-- Bootstrap admin account.
INSERT INTO user (email, password, fname, lname, create_date, is_admin) VALUES ('<EMAIL>', '<PASSWORD>', 'GM', 'Admin', '9999-12-31 23:59:59', '1');
-- Profanity filter word list.
INSERT INTO explicit_content (explicit_words) VALUES ('shit'),('fuck'),('damn'),('bitch'),('piss'),('dick'),('cock'),('pussy'),('asshole'),('fag'),('bastard'),('douche'),('ass'),('cunt'),('motherfucker'),('nigger'),('whore'),('dickhead');
-- Common-word (stop-word) list.
INSERT INTO common_content (common_words) VALUES ('the'),('be'),('to'),('of'),('and'),('a'),('in'),('that'),('have'),('I'),('it'),('for'),('not'),('on'),('with'),('he'),('as'),('you'),('do'),('at'),('this'),('but'),('his'),('by'),('from'),('they'),('we'),('say'),('her'),('she');
-- Makes: ids 1..4 in listed order.
INSERT INTO makes (make_name) VALUES ('Chevrolet'),('Buick'),('Cadillac'),('GMC');
-- Models: Chevrolet (ids 1-19), Buick (20-24), Cadillac (25-30), GMC (31-34).
INSERT INTO models (make_id, model_name) VALUES
    (1, 'Cruze'),(1, 'Sonic'),(1, 'Spark'),(1, 'Captiva'),(1, 'Colorado'),
    (1, 'Impala'),(1, 'Malibu'),(1, 'Camaro'),(1, 'Corvette'),(1, 'SS'),
    (1, 'Traverse'),(1, 'Tahoe'),(1, 'Suburban'),(1, 'Express'),(1, 'Trax'),
    (1, 'Savana'),(1, 'Avalanche'),(1, 'Silverado'),(1, 'Equinox'),
    (2, 'Verano'),(2, 'Lacrosse'),(2, 'Regal'),(2, 'Encore'),(2, 'Enclave'),
    (3, 'CTS'),(3, 'XTS'),(3, 'ATS'),(3, 'CTS_V'),(3, 'SRX'),(3, 'Escalade'),
    (4, 'Terrain'),(4, 'Acadia'),(4, 'Yukon'),(4, 'Sierra');
-- Every model (1..34) gets the 2014 and 2015 model years.
INSERT INTO model_years (model_id, year_name) VALUES
    (1, '2014'), (1, '2015'), (2, '2014'), (2, '2015'),
    (3, '2014'), (3, '2015'), (4, '2014'), (4, '2015'),
    (5, '2014'), (5, '2015'), (6, '2014'), (6, '2015'),
    (7, '2014'), (7, '2015'), (8, '2014'), (8, '2015'),
    (9, '2014'), (9, '2015'), (10, '2014'), (10, '2015'),
    (11, '2014'), (11, '2015'), (12, '2014'), (12, '2015'),
    (13, '2014'), (13, '2015'), (14, '2014'), (14, '2015'),
    (15, '2014'), (15, '2015'), (16, '2014'), (16, '2015'),
    (17, '2014'), (17, '2015'), (18, '2014'), (18, '2015'),
    (19, '2014'), (19, '2015'), (20, '2014'), (20, '2015'),
    (21, '2014'), (21, '2015'), (22, '2014'), (22, '2015'),
    (23, '2014'), (23, '2015'), (24, '2014'), (24, '2015'),
    (25, '2014'), (25, '2015'), (26, '2014'), (26, '2015'),
    (27, '2014'), (27, '2015'), (28, '2014'), (28, '2015'),
    (29, '2014'), (29, '2015'), (30, '2014'), (30, '2015'),
    (31, '2014'), (31, '2015'), (32, '2014'), (32, '2015'),
    (33, '2014'), (33, '2015'), (34, '2014'), (34, '2015');
-- Colloquial alternates: "Chevy" for Chevrolet, "Caddy" for Cadillac,
-- "Vette" for Corvette (model id 9).
INSERT INTO make_alternates (make_id, make_alternate_name) VALUES (1, 'Chevy'),(3, 'Caddy');
INSERT INTO model_alternates (model_id, model_alternate_name) VALUES (9, 'Vette');
COMMIT;
-- Keep ORDER_BOOK consistent: when an ORDERS row is deleted, purge its
-- ORDER_BOOK entries as well.
-- Fix: removed the vestigial empty DECLARE section (no local
-- declarations are needed; DECLARE is optional in trigger bodies).
-- NOTE(review): a foreign key on ORDER_BOOK.ORDER_ID with
-- ON DELETE CASCADE would achieve the same without trigger overhead.
CREATE OR REPLACE TRIGGER CLEAN_ORDER_BOOK_FOR_ORDERS
AFTER DELETE
ON ORDERS
FOR EACH ROW
BEGIN
    DELETE FROM ORDER_BOOK WHERE ORDER_ID = :OLD.ORDER_ID;
END;
/
|
USE [ftnpowerdb]
GO
/****** Object: StoredProcedure [dbo].[SP_GlobalTop20Users] Script Date: 29.08.2020 11:59:12 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
-- Returns the global top-20 players by account power level.
-- Inner query: takes the top 30 profiles ordered by power/commander/
-- collection-book level and numbers rows per PlayerName; the outer
-- RN = 1 filter keeps only one row per player name (dedup), then the
-- outer TOP 20 trims the final list. Id is returned as an empty string
-- for interface compatibility with SP_LocalTop20Users.
-- NOTE(review): the SP_ name prefix can collide with system procedure
-- resolution in SQL Server — confirm this is acceptable here.
CREATE PROCEDURE [dbo].[SP_GlobalTop20Users]
AS
BEGIN
select top 20 a.AccountPowerLevel, a.PlayerName, '' as Id, a.EpicId, a.CommanderLevel, a.CollectionBookLevel
from(SELECT TOP 30 CAST(AccountPowerLevel AS float) as AccountPowerLevel, PlayerName, EpicId, CommanderLevel, CollectionBookLevel,ROW_NUMBER() OVER (PARTITION BY PlayerName ORDER BY AccountPowerLevel DESC) AS RN
from dbo.FortnitePVEProfiles order by AccountPowerLevel desc, CommanderLevel desc, CollectionBookLevel desc) as a where RN = 1
END
GO
/****** Object: StoredProcedure [dbo].[SP_IJTABLE_UserNotValid] Script Date: 29.08.2020 11:59:13 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
CREATE PROCEDURE [dbo].[SP_IJTABLE_UserNotValid]
    @GuildId [nvarchar](max),
    @UserId [nvarchar](max)
AS
BEGIN
    -- Marks a user's name as invalid and clears any pending name-update
    -- queue entry for that user in the given guild.
    EXEC dbo.SP_User_ValidState @UserId, 0
    -- FIX: schema-qualified this call (it was bare), matching the call
    -- above and avoiding per-caller name resolution.
    EXEC dbo.SP_NameState_InQueue @GuildId, @UserId, 0
END
GO
/****** Object: StoredProcedure [dbo].[SP_ListOfReadyToUpdate] Script Date: 29.08.2020 11:59:13 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
-- Lists (server, user) pairs ready for a name refresh: users with a known
-- Epic id, a valid name, no name lock, belonging to servers currently due
-- according to PriorityTables.
CREATE PROCEDURE [dbo].[SP_ListOfReadyToUpdate]
AS
BEGIN
    SELECT DISTINCT DiscordServerId, FortniteUserId, EpicId, NameTag, GameUserMode, PVEDecimals
    FROM dbo.NameStates AS ns
    INNER JOIN dbo.FortniteUsers AS fu ON fu.Id = ns.FortniteUserId
    INNER JOIN dbo.DiscordServers AS ds ON ds.Id = ns.DiscordServerId
    WHERE EpicId IS NOT NULL
      AND ns.LockName = 0
      AND fu.IsValidName = 1
      -- PriorityTables.Id rows starting with 's' encode server ids after the
      -- prefix; keep servers whose Deadline has not fallen a full minute behind.
      AND ns.DiscordServerId IN (
            SELECT SUBSTRING(Id, 2, 25)
            FROM PriorityTables
            WHERE DATEDIFF(MINUTE, PriorityTables.Deadline, SYSDATETIMEOFFSET()) < 1
              AND Id LIKE 's%'
          )
    ORDER BY ns.FortniteUserId
END
GO
/****** Object: StoredProcedure [dbo].[SP_LocalTop20Users] Script Date: 29.08.2020 11:59:13 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
CREATE PROCEDURE [dbo].[SP_LocalTop20Users]
    @GuildId [nvarchar](max)
AS
BEGIN
    -- Top-20 players of one Discord guild by account power level, excluding
    -- black-listed users; best row per PlayerName only (RN = 1).
    SELECT TOP 20
        a.AccountPowerLevel, a.PlayerName, a.Id, a.EpicId,
        a.CommanderLevel, a.CollectionBookLevel
    FROM (
        SELECT TOP 30
            CAST(c.AccountPowerLevel AS float) AS AccountPowerLevel,
            c.PlayerName,
            FortniteUserId AS Id,
            b.EpicId,
            c.CommanderLevel,
            c.CollectionBookLevel,
            ROW_NUMBER() OVER (PARTITION BY PlayerName ORDER BY AccountPowerLevel DESC) AS RN
        FROM dbo.NameStates
        INNER JOIN dbo.FortniteUsers AS b ON b.Id = dbo.NameStates.FortniteUserId
        -- PVE profile EpicIds are stored under a different collation.
        INNER JOIN dbo.FortnitePVEProfiles AS c ON c.EpicId COLLATE Turkish_CI_AS = b.EpicId
        WHERE DiscordServerId = @GuildId
          AND b.IsValidName = 1
          AND NOT EXISTS (SELECT Id FROM dbo.BlackListUsers WHERE b.Id = dbo.BlackListUsers.Id)
        ORDER BY AccountPowerLevel DESC, CommanderLevel DESC, CollectionBookLevel DESC
    ) AS a
    WHERE RN = 1
    -- BUG FIX: the outer TOP 20 had no ORDER BY, so the returned subset and
    -- its order were arbitrary; order it explicitly.
    ORDER BY a.AccountPowerLevel DESC, a.CommanderLevel DESC, a.CollectionBookLevel DESC
END
GO
/****** Object: StoredProcedure [dbo].[SP_NameState_ClearQueueByGuildId] Script Date: 29.08.2020 11:59:13 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
-- Clears every pending name-update queue flag for one Discord guild.
CREATE PROCEDURE [dbo].[SP_NameState_ClearQueueByGuildId]
    @GuildId [nvarchar](max)
AS
BEGIN
    UPDATE dbo.NameStates
    SET InQueue = 0
    WHERE DiscordServerId = @GuildId
      AND InQueue != 0   -- skip rows already cleared (avoids no-op writes)
END
GO
/****** Object: StoredProcedure [dbo].[SP_NameState_InQueue] Script Date: 29.08.2020 11:59:13 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
-- Sets the InQueue flag on the name-state row for a (guild, user) pair.
CREATE PROCEDURE [dbo].[SP_NameState_InQueue]
    @GuildId [nvarchar](max),
    @UserId [nvarchar](max),
    @InQueue [bit]
AS
BEGIN
    UPDATE dbo.NameStates
    SET InQueue = @InQueue
    WHERE FortniteUserId = @UserId
      AND DiscordServerId = @GuildId
      AND InQueue != @InQueue   -- only touch rows not already in that state
END
GO
/****** Object: StoredProcedure [dbo].[SP_RemoveNameStateForDiscord] Script Date: 29.08.2020 11:59:13 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
-- Removes the name-state row linking a Fortnite user to a Discord guild.
CREATE PROCEDURE [dbo].[SP_RemoveNameStateForDiscord]
    @GuildId [nvarchar](max),
    @UserId [nvarchar](max)
AS
BEGIN
    DELETE FROM NameStates
    WHERE DiscordServerId = @GuildId
      AND FortniteUserId = @UserId
END
GO
/****** Object: StoredProcedure [dbo].[SP_TABLE_FortnitePVEProfile_Update] Script Date: 29.08.2020 11:59:13 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
CREATE PROCEDURE [dbo].[SP_TABLE_FortnitePVEProfile_Update]
    @EpicId [nvarchar](50),
    @PlayerName [nvarchar](50),
    @AccountPowerLevel [float],
    @Map [int],
    @CommanderLevel [int],
    @CollectionBookLevel [int],
    @NumMythicSchematics [int],
    @EliteFortnite2019 [bit]
AS
BEGIN
    -- Upsert of a PVE profile keyed by EpicId.
    -- FIX: the plain IF EXISTS / INSERT pattern races under concurrency
    -- (two sessions can both see "not exists" and both insert). UPDLOCK +
    -- HOLDLOCK on the existence check, inside a transaction, serializes
    -- writers on the same key.
    BEGIN TRANSACTION
    IF EXISTS (SELECT 1 FROM dbo.FortnitePVEProfiles WITH (UPDLOCK, HOLDLOCK) WHERE EpicId = @EpicId)
    BEGIN
        UPDATE dbo.FortnitePVEProfiles
        SET PlayerName = @PlayerName,
            AccountPowerLevel = @AccountPowerLevel,
            Map = @Map,
            CommanderLevel = @CommanderLevel,
            CollectionBookLevel = @CollectionBookLevel,
            NumMythicSchematics = @NumMythicSchematics,
            EliteFortnite2019 = @EliteFortnite2019
        WHERE EpicId = @EpicId
    END
    ELSE
    BEGIN
        INSERT INTO dbo.FortnitePVEProfiles (EpicId, PlayerName, AccountPowerLevel, Map, CommanderLevel, CollectionBookLevel, NumMythicSchematics, EliteFortnite2019)
        VALUES (@EpicId, @PlayerName, @AccountPowerLevel, @Map, @CommanderLevel, @CollectionBookLevel, @NumMythicSchematics, @EliteFortnite2019)
    END
    COMMIT TRANSACTION
END
GO
/****** Object: StoredProcedure [dbo].[SP_TABLE_FortnitePVPProfile_Update] Script Date: 29.08.2020 11:59:13 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
CREATE PROCEDURE [dbo].[SP_TABLE_FortnitePVPProfile_Update]
    @EpicId [nvarchar](50),
    @PlayerName [nvarchar](50),
    @PvpWinSolo [int],
    @PvpWinDuo [int],
    @PvpWinSquad [int]
AS
BEGIN
    -- Upsert of a PVP profile keyed by EpicId.
    -- FIX: the plain IF EXISTS / INSERT pattern races under concurrency;
    -- UPDLOCK + HOLDLOCK on the existence check inside a transaction
    -- serializes writers on the same key.
    BEGIN TRANSACTION
    IF EXISTS (SELECT 1 FROM dbo.FortnitePVPProfiles WITH (UPDLOCK, HOLDLOCK) WHERE EpicId = @EpicId)
    BEGIN
        UPDATE dbo.FortnitePVPProfiles
        SET PlayerName = @PlayerName,
            PvpWinSolo = @PvpWinSolo,
            PvpWinDuo = @PvpWinDuo,
            PvpWinSquad = @PvpWinSquad
        WHERE EpicId = @EpicId
    END
    ELSE
    BEGIN
        INSERT INTO dbo.FortnitePVPProfiles (EpicId, PlayerName, PvpWinSolo, PvpWinDuo, PvpWinSquad)
        VALUES (@EpicId, @PlayerName, @PvpWinSolo, @PvpWinDuo, @PvpWinSquad)
    END
    COMMIT TRANSACTION
END
GO
/****** Object: StoredProcedure [dbo].[SP_TABLE_FortniteUser_Update] Script Date: 29.08.2020 11:59:13 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
-- Upsert of a FortniteUsers row keyed by Id.
-- NOTE(review): the UPDATE branch is throttled (applied only when the
-- existing row's LastUpDateTime is more than 5 seconds old), while the
-- INSERT branch is not; an existing-but-recently-updated row is silently
-- left unchanged — confirm this rate-limiting is intended.
-- NOTE(review): IF EXISTS / INSERT upserts can race under concurrency;
-- consider serializing the existence check (e.g. UPDLOCK/HOLDLOCK).
CREATE PROCEDURE [dbo].[SP_TABLE_FortniteUser_Update]
@Id [nvarchar](20),
@EpicId [nvarchar](50)= NULL,
@NameTag [bit],
@IsValidName [bit],
@GameUserMode [int]
AS
BEGIN
IF EXISTS (SELECT 1 FROM dbo.FortniteUsers where Id = @Id)
BEGIN
UPDATE dbo.FortniteUsers
set EpicId = @EpicId, NameTag = @NameTag, IsValidName = @IsValidName, GameUserMode = @GameUserMode, LastUpDateTime = SYSDATETIMEOFFSET()
where Id = @Id and DATEDIFF(SECOND, LastUpDateTime, SYSDATETIMEOFFSET()) > 5
END
ELSE
BEGIN
insert into dbo.FortniteUsers (Id, EpicId, NameTag, IsValidName, GameUserMode, LastUpDateTime)
values (@Id, @EpicId, @NameTag, @IsValidName, @GameUserMode, SYSDATETIMEOFFSET())
END
END
GO
/****** Object: StoredProcedure [dbo].[SP_User_LastUpdateTime] Script Date: 29.08.2020 11:59:13 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
-- Stamps a user's LastUpDateTime with the current time, throttled so the
-- stamp is rewritten at most about once per second.
CREATE PROCEDURE [dbo].[SP_User_LastUpdateTime]
    @UserId [nvarchar](max)
AS
BEGIN
    UPDATE dbo.FortniteUsers
    SET LastUpDateTime = SYSDATETIMEOFFSET()
    WHERE Id = @UserId
      AND DATEDIFF(SECOND, LastUpDateTime, SYSDATETIMEOFFSET()) > 1
END
GO
/****** Object: StoredProcedure [dbo].[SP_User_ValidState] Script Date: 29.08.2020 11:59:13 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
-- Sets a user's IsValidName flag and refreshes the update stamp, skipping
-- rows already in the requested state or updated within the last 5 seconds.
CREATE PROCEDURE [dbo].[SP_User_ValidState]
    @UserId [nvarchar](max),
    @ValidState [bit]
AS
BEGIN
    UPDATE dbo.FortniteUsers
    SET IsValidName = @ValidState,
        LastUpDateTime = SYSDATETIMEOFFSET()
    WHERE Id = @UserId
      AND IsValidName != @ValidState
      AND DATEDIFF(SECOND, LastUpDateTime, SYSDATETIMEOFFSET()) > 5
END
GO
/****** Object: StoredProcedure [dbo].[SP_User_VerifiedProfile] Script Date: 29.08.2020 11:59:13 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
-- Sets the VerifiedProfile flag for a single user row.
CREATE PROCEDURE [dbo].[SP_User_VerifiedProfile]
    @Id [nvarchar](20),
    @VerifiedProfile [bit]
AS
BEGIN
    UPDATE dbo.FortniteUsers
    SET VerifiedProfile = @VerifiedProfile
    WHERE Id = @Id
END
GO
|
-- Repository: super132/flyway
--
-- Copyright 2010-2017 Boxfuse GmbH
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
--
-- Per-day snapshot of each network device's state at the start of the day;
-- one row per (day, device).
CREATE TABLE network_device_day_start_state (
    segment_Id          INTEGER DEFAULT 1 NOT NULL,
    day                 TIMESTAMP,
    net_device_id       INTEGER,
    uiq_device_state_id INTEGER,
    PRIMARY KEY (day, net_device_id)
);
-- Schema: hypw
-- DROP SCHEMA hypw;
CREATE SCHEMA hypw AUTHORIZATION elf_admin;
COMMENT ON SCHEMA hypw IS 'Schemata für Hydrography';
-- == Physical Water tables =======
-- Crossing
-- Geographical name of a Crossing feature, flattened from GML; each
-- *_nilreason / *_nil column pair carries voidability metadata for the
-- property it is prefixed with.
CREATE TABLE hypw.crossing_geographicalname (
id serial PRIMARY KEY,
nilreason text,
nil boolean,
language text,
language_nilreason text,
language_nil boolean,
nativeness_href text,
nativeness_nilreason text,
namestatus_href text,
namestatus_nilreason text,
sourceofname text,
sourceofname_nilreason text,
sourceofname_nil boolean,
pronunciation_nilreason text,
pronunciation_nil boolean,
pronunciation_pronunciationsoundlink text,
pronunciation_pronunciationsoundlink_nilreason text,
pronunciation_pronunciationsoundlink_nil boolean,
pronunciation_pronunciationipa text,
pronunciation_pronunciationipa_nilreason text,
pronunciation_pronunciationipa_nil boolean
);
-- Spelling variants of a name; child rows are removed with their parent.
CREATE TABLE hypw.crossing_geographicalname_spelling (
id serial PRIMARY KEY,
parentfk integer NOT NULL REFERENCES hypw.crossing_geographicalname ON DELETE CASCADE,
spellingofname_text text,
spellingofname_script text,
spellingofname_script_nilreason text,
spellingofname_script_nil boolean,
spellingofname_transliterationscheme text,
spellingofname_transliterationscheme_nilreason text,
spellingofname_transliterationscheme_nil boolean
);
-- Hydrographic identifier: classification scheme + local id + namespace.
CREATE TABLE hypw.crossing_hydroid (
id serial PRIMARY KEY,
hydroidentifier_classificationscheme text,
hydroidentifier_localid text,
hydroidentifier_namespace text
);
-- Crossing feature table; the feature's localid is the primary key.
-- ON DELETE RESTRICT on the lookup FKs prevents deleting a name/id row
-- still referenced by a feature.
CREATE TABLE hypw.crossing (
localid text,
beginlifespanversion timestamp,
beginlifespanversion_nilreason text,
beginlifespanversion_nil boolean,
condition_href text,
condition_nil boolean,
condition_nilreason text,
geometry_owns boolean,
geometry_nilreason text,
geometry_remoteschema text,
geometry_nil boolean,
lod_denominator_nilreason text,
lod_denominator integer,
type_href text,
type_nil boolean,
type_nilreason text,
namedplace_fk integer REFERENCES hypw.crossing_geographicalname ON DELETE RESTRICT,
hydro_id_fk integer REFERENCES hypw.crossing_hydroid ON DELETE RESTRICT,
CONSTRAINT crossing_pkey PRIMARY KEY (localid)
);
-- Register the 2-D geometry column (SRID 4258, any geometry type) via the
-- legacy PostGIS helper, then index it spatially.
SELECT ADDGEOMETRYCOLUMN('hypw', 'crossing','geometry','4258','GEOMETRY', 2);
CREATE INDEX crossing_geometry_idx ON hypw.crossing USING GIST (geometry);
ALTER TABLE hypw.crossing_geographicalname OWNER TO elf_admin;
ALTER TABLE hypw.crossing_geographicalname_spelling OWNER TO elf_admin;
ALTER TABLE hypw.crossing_hydroid OWNER TO elf_admin;
ALTER TABLE hypw.crossing OWNER TO elf_admin;
-- DamOrWeir
-- Geographical name of a DamOrWeir feature (same flattened-GML layout as
-- the other *_geographicalname tables in this schema).
CREATE TABLE hypw.damorweir_geographicalname (
id serial PRIMARY KEY,
nilreason text,
nil boolean,
language text,
language_nilreason text,
language_nil boolean,
nativeness_href text,
nativeness_nilreason text,
namestatus_href text,
namestatus_nilreason text,
sourceofname text,
sourceofname_nilreason text,
sourceofname_nil boolean,
pronunciation_nilreason text,
pronunciation_nil boolean,
pronunciation_pronunciationsoundlink text,
pronunciation_pronunciationsoundlink_nilreason text,
pronunciation_pronunciationsoundlink_nil boolean,
pronunciation_pronunciationipa text,
pronunciation_pronunciationipa_nilreason text,
pronunciation_pronunciationipa_nil boolean
);
-- Spelling variants of a name; cascades with the parent name row.
CREATE TABLE hypw.damorweir_geographicalname_spelling (
id serial PRIMARY KEY,
parentfk integer NOT NULL REFERENCES hypw.damorweir_geographicalname ON DELETE CASCADE,
spellingofname_text text,
spellingofname_script text,
spellingofname_script_nilreason text,
spellingofname_script_nil boolean,
spellingofname_transliterationscheme text,
spellingofname_transliterationscheme_nilreason text,
spellingofname_transliterationscheme_nil boolean
);
-- Hydrographic identifier lookup.
CREATE TABLE hypw.damorweir_hydroid (
id serial PRIMARY KEY,
hydroidentifier_classificationscheme text,
hydroidentifier_localid text,
hydroidentifier_namespace text
);
-- DamOrWeir feature table, keyed by localid.
CREATE TABLE hypw.damorweir (
localid text,
beginlifespanversion timestamp,
beginlifespanversion_nilreason text,
beginlifespanversion_nil boolean,
condition_href text,
condition_nil boolean,
condition_nilreason text,
geometry_owns boolean,
geometry_nilreason text,
geometry_remoteschema text,
geometry_nil boolean,
lod_denominator_nilreason text,
lod_denominator integer,
namedplace_fk integer REFERENCES hypw.damorweir_geographicalname ON DELETE RESTRICT,
hydro_id_fk integer REFERENCES hypw.damorweir_hydroid ON DELETE RESTRICT,
CONSTRAINT damorweir_pkey PRIMARY KEY (localid)
);
SELECT ADDGEOMETRYCOLUMN('hypw', 'damorweir','geometry','4258','GEOMETRY', 2);
CREATE INDEX damorweir_geometry_idx ON hypw.damorweir USING GIST (geometry);
-- NOTE(review): ownership is granted to 'elf' here, while the crossing
-- tables use 'elf_admin' (the schema's AUTHORIZATION role) — confirm
-- which role is intended.
ALTER TABLE hypw.damorweir_geographicalname OWNER TO elf;
ALTER TABLE hypw.damorweir_geographicalname_spelling OWNER TO elf;
ALTER TABLE hypw.damorweir_hydroid OWNER TO elf;
ALTER TABLE hypw.damorweir OWNER TO elf;
-- DrainageBasin
-- Geographical name of a DrainageBasin feature (flattened GML layout).
CREATE TABLE hypw.drainagebasin_geographicalname (
id serial PRIMARY KEY,
nilreason text,
nil boolean,
language text,
language_nilreason text,
language_nil boolean,
nativeness_href text,
nativeness_nilreason text,
namestatus_href text,
namestatus_nilreason text,
sourceofname text,
sourceofname_nilreason text,
sourceofname_nil boolean,
pronunciation_nilreason text,
pronunciation_nil boolean,
pronunciation_pronunciationsoundlink text,
pronunciation_pronunciationsoundlink_nilreason text,
pronunciation_pronunciationsoundlink_nil boolean,
pronunciation_pronunciationipa text,
pronunciation_pronunciationipa_nilreason text,
pronunciation_pronunciationipa_nil boolean
);
-- Spelling variants of a name; cascades with the parent name row.
CREATE TABLE hypw.drainagebasin_geographicalname_spelling (
id serial PRIMARY KEY,
parentfk integer NOT NULL REFERENCES hypw.drainagebasin_geographicalname ON DELETE CASCADE,
spellingofname_text text,
spellingofname_script text,
spellingofname_script_nilreason text,
spellingofname_script_nil boolean,
spellingofname_transliterationscheme text,
spellingofname_transliterationscheme_nilreason text,
spellingofname_transliterationscheme_nil boolean
);
-- Hydrographic identifier lookup.
CREATE TABLE hypw.drainagebasin_hydroid (
id serial PRIMARY KEY,
hydroidentifier_classificationscheme text,
hydroidentifier_localid text,
hydroidentifier_namespace text
);
-- DrainageBasin feature table: area, basin order code and origin, keyed by
-- localid. Units for 'area' travel in area_uom.
CREATE TABLE hypw.drainagebasin (
localid text,
area numeric,
area_nilreason text,
area_uom text,
area_nil boolean,
basinorder_nilreason text,
basinorder_nil boolean,
basinorder_hydroordercode_order text,
basinorder_hydroordercode_orderscheme text,
basinorder_hydroordercode_scope text,
beginlifespanversion timestamp,
beginlifespanversion_nilreason text,
beginlifespanversion_nil boolean,
geometry_nilreason text,
geometry_remoteschema text,
geometry_owns boolean,
origin text,
origin_nilreason text,
origin_nil boolean,
namedplace_fk integer REFERENCES hypw.drainagebasin_geographicalname ON DELETE RESTRICT,
hydro_id_fk integer REFERENCES hypw.drainagebasin_hydroid ON DELETE RESTRICT,
CONSTRAINT drainagebasin_pkey PRIMARY KEY (localid)
);
SELECT ADDGEOMETRYCOLUMN('hypw', 'drainagebasin','geometry','4258','GEOMETRY', 2);
CREATE INDEX drainagebasin_geometry_idx ON hypw.drainagebasin USING GIST (geometry);
-- Sub-basins contained in this basin (xlink href references).
-- NOTE(review): unlike riverbasin_containsbasin, this table has no
-- nil/nilreason columns — confirm the asymmetry is intended.
CREATE TABLE hypw.drainagebasin_containsbasin (
id serial PRIMARY KEY,
parentfk text NOT NULL REFERENCES hypw.drainagebasin ON DELETE CASCADE,
href text
);
-- Outlet references of the basin (xlink href, voidable).
CREATE TABLE hypw.drainagebasin_outlet (
id serial PRIMARY KEY,
parentfk text NOT NULL REFERENCES hypw.drainagebasin ON DELETE CASCADE,
href text,
nil boolean,
nilreason text
);
ALTER TABLE hypw.drainagebasin_geographicalname OWNER TO elf;
ALTER TABLE hypw.drainagebasin_geographicalname_spelling OWNER TO elf;
ALTER TABLE hypw.drainagebasin_hydroid OWNER TO elf;
ALTER TABLE hypw.drainagebasin OWNER TO elf;
ALTER TABLE hypw.drainagebasin_containsbasin OWNER TO elf;
ALTER TABLE hypw.drainagebasin_outlet OWNER TO elf;
-- Falls
-- Geographical name of a Falls feature (flattened GML layout).
CREATE TABLE hypw.falls_geographicalname (
id serial PRIMARY KEY,
nilreason text,
nil boolean,
language text,
language_nilreason text,
language_nil boolean,
nativeness_href text,
nativeness_nilreason text,
namestatus_href text,
namestatus_nilreason text,
sourceofname text,
sourceofname_nilreason text,
sourceofname_nil boolean,
pronunciation_nilreason text,
pronunciation_nil boolean,
pronunciation_pronunciationsoundlink text,
pronunciation_pronunciationsoundlink_nilreason text,
pronunciation_pronunciationsoundlink_nil boolean,
pronunciation_pronunciationipa text,
pronunciation_pronunciationipa_nilreason text,
pronunciation_pronunciationipa_nil boolean
);
-- Spelling variants of a name; cascades with the parent name row.
CREATE TABLE hypw.falls_geographicalname_spelling (
id serial PRIMARY KEY,
parentfk integer NOT NULL REFERENCES hypw.falls_geographicalname ON DELETE CASCADE,
spellingofname_text text,
spellingofname_script text,
spellingofname_script_nilreason text,
spellingofname_script_nil boolean,
spellingofname_transliterationscheme text,
spellingofname_transliterationscheme_nilreason text,
spellingofname_transliterationscheme_nil boolean
);
-- Hydrographic identifier lookup.
CREATE TABLE hypw.falls_hydroid (
id serial PRIMARY KEY,
hydroidentifier_classificationscheme text,
hydroidentifier_localid text,
hydroidentifier_namespace text
);
-- Falls feature table; 'height' carries its unit in height_uom.
CREATE TABLE hypw.falls (
localid text,
beginlifespanversion timestamp,
beginlifespanversion_nilreason text,
beginlifespanversion_nil boolean,
geometry_owns boolean,
geometry_nilreason text,
geometry_remoteschema text,
geometry_nil boolean,
lod_denominator_nilreason text,
lod_denominator integer,
height numeric,
height_uom text,
height_nil boolean,
height_nilreason text,
namedplace_fk integer REFERENCES hypw.falls_geographicalname ON DELETE RESTRICT,
hydro_id_fk integer REFERENCES hypw.falls_hydroid ON DELETE RESTRICT,
CONSTRAINT falls_pkey PRIMARY KEY (localid)
);
SELECT ADDGEOMETRYCOLUMN('hypw', 'falls','geometry','4258','GEOMETRY', 2);
CREATE INDEX falls_geometry_idx ON hypw.falls USING GIST (geometry);
ALTER TABLE hypw.falls_geographicalname OWNER TO elf;
ALTER TABLE hypw.falls_geographicalname_spelling OWNER TO elf;
ALTER TABLE hypw.falls_hydroid OWNER TO elf;
ALTER TABLE hypw.falls OWNER TO elf;
-- Ford
-- Geographical name of a Ford feature (flattened GML layout).
CREATE TABLE hypw.ford_geographicalname (
id serial PRIMARY KEY,
nilreason text,
nil boolean,
language text,
language_nilreason text,
language_nil boolean,
nativeness_href text,
nativeness_nilreason text,
namestatus_href text,
namestatus_nilreason text,
sourceofname text,
sourceofname_nilreason text,
sourceofname_nil boolean,
pronunciation_nilreason text,
pronunciation_nil boolean,
pronunciation_pronunciationsoundlink text,
pronunciation_pronunciationsoundlink_nilreason text,
pronunciation_pronunciationsoundlink_nil boolean,
pronunciation_pronunciationipa text,
pronunciation_pronunciationipa_nilreason text,
pronunciation_pronunciationipa_nil boolean
);
-- Spelling variants of a name; cascades with the parent name row.
CREATE TABLE hypw.ford_geographicalname_spelling (
id serial PRIMARY KEY,
parentfk integer NOT NULL REFERENCES hypw.ford_geographicalname ON DELETE CASCADE,
spellingofname_text text,
spellingofname_script text,
spellingofname_script_nilreason text,
spellingofname_script_nil boolean,
spellingofname_transliterationscheme text,
spellingofname_transliterationscheme_nilreason text,
spellingofname_transliterationscheme_nil boolean
);
-- Hydrographic identifier lookup.
CREATE TABLE hypw.ford_hydroid (
id serial PRIMARY KEY,
hydroidentifier_classificationscheme text,
hydroidentifier_localid text,
hydroidentifier_namespace text
);
-- Ford feature table, keyed by localid.
CREATE TABLE hypw.ford (
localid text,
beginlifespanversion timestamp,
beginlifespanversion_nilreason text,
beginlifespanversion_nil boolean,
condition_href text,
condition_nil boolean,
condition_nilreason text,
geometry_owns boolean,
geometry_nilreason text,
geometry_remoteschema text,
geometry_nil boolean,
lod_denominator_nilreason text,
lod_denominator integer,
namedplace_fk integer REFERENCES hypw.ford_geographicalname ON DELETE RESTRICT,
hydro_id_fk integer REFERENCES hypw.ford_hydroid ON DELETE RESTRICT,
CONSTRAINT ford_pkey PRIMARY KEY (localid)
);
SELECT ADDGEOMETRYCOLUMN('hypw', 'ford','geometry','4258','GEOMETRY', 2);
CREATE INDEX ford_geometry_idx ON hypw.ford USING GIST (geometry);
ALTER TABLE hypw.ford_geographicalname OWNER TO elf;
ALTER TABLE hypw.ford_geographicalname_spelling OWNER TO elf;
ALTER TABLE hypw.ford_hydroid OWNER TO elf;
ALTER TABLE hypw.ford OWNER TO elf;
-- Lock
-- Geographical name of a Lock feature (flattened GML layout).
CREATE TABLE hypw.lock_geographicalname (
id serial PRIMARY KEY,
nilreason text,
nil boolean,
language text,
language_nilreason text,
language_nil boolean,
nativeness_href text,
nativeness_nilreason text,
namestatus_href text,
namestatus_nilreason text,
sourceofname text,
sourceofname_nilreason text,
sourceofname_nil boolean,
pronunciation_nilreason text,
pronunciation_nil boolean,
pronunciation_pronunciationsoundlink text,
pronunciation_pronunciationsoundlink_nilreason text,
pronunciation_pronunciationsoundlink_nil boolean,
pronunciation_pronunciationipa text,
pronunciation_pronunciationipa_nilreason text,
pronunciation_pronunciationipa_nil boolean
);
-- Spelling variants of a name; cascades with the parent name row.
CREATE TABLE hypw.lock_geographicalname_spelling (
id serial PRIMARY KEY,
parentfk integer NOT NULL REFERENCES hypw.lock_geographicalname ON DELETE CASCADE,
spellingofname_text text,
spellingofname_script text,
spellingofname_script_nilreason text,
spellingofname_script_nil boolean,
spellingofname_transliterationscheme text,
spellingofname_transliterationscheme_nilreason text,
spellingofname_transliterationscheme_nil boolean
);
-- Hydrographic identifier lookup.
CREATE TABLE hypw.lock_hydroid (
id serial PRIMARY KEY,
hydroidentifier_classificationscheme text,
hydroidentifier_localid text,
hydroidentifier_namespace text
);
-- Lock feature table, keyed by localid. ('lock' is a feature name here,
-- not a reserved word, but must stay schema-qualified in queries.)
CREATE TABLE hypw.lock (
localid text,
beginlifespanversion timestamp,
beginlifespanversion_nilreason text,
beginlifespanversion_nil boolean,
condition_href text,
condition_nil boolean,
condition_nilreason text,
geometry_owns boolean,
geometry_nilreason text,
geometry_remoteschema text,
geometry_nil boolean,
lod_denominator_nilreason text,
lod_denominator integer,
namedplace_fk integer REFERENCES hypw.lock_geographicalname ON DELETE RESTRICT,
hydro_id_fk integer REFERENCES hypw.lock_hydroid ON DELETE RESTRICT,
CONSTRAINT lock_pkey PRIMARY KEY (localid)
);
SELECT ADDGEOMETRYCOLUMN('hypw', 'lock','geometry','4258','GEOMETRY', 2);
CREATE INDEX lock_geometry_idx ON hypw.lock USING GIST (geometry);
ALTER TABLE hypw.lock_geographicalname OWNER TO elf;
ALTER TABLE hypw.lock_geographicalname_spelling OWNER TO elf;
ALTER TABLE hypw.lock_hydroid OWNER TO elf;
ALTER TABLE hypw.lock OWNER TO elf;
-- Rapids
-- Geographical name of a Rapids feature (flattened GML layout).
CREATE TABLE hypw.rapids_geographicalname (
id serial PRIMARY KEY,
nilreason text,
nil boolean,
language text,
language_nilreason text,
language_nil boolean,
nativeness_href text,
nativeness_nilreason text,
namestatus_href text,
namestatus_nilreason text,
sourceofname text,
sourceofname_nilreason text,
sourceofname_nil boolean,
pronunciation_nilreason text,
pronunciation_nil boolean,
pronunciation_pronunciationsoundlink text,
pronunciation_pronunciationsoundlink_nilreason text,
pronunciation_pronunciationsoundlink_nil boolean,
pronunciation_pronunciationipa text,
pronunciation_pronunciationipa_nilreason text,
pronunciation_pronunciationipa_nil boolean
);
-- Spelling variants of a name; cascades with the parent name row.
CREATE TABLE hypw.rapids_geographicalname_spelling (
id serial PRIMARY KEY,
parentfk integer NOT NULL REFERENCES hypw.rapids_geographicalname ON DELETE CASCADE,
spellingofname_text text,
spellingofname_script text,
spellingofname_script_nilreason text,
spellingofname_script_nil boolean,
spellingofname_transliterationscheme text,
spellingofname_transliterationscheme_nilreason text,
spellingofname_transliterationscheme_nil boolean
);
-- Hydrographic identifier lookup.
CREATE TABLE hypw.rapids_hydroid (
id serial PRIMARY KEY,
hydroidentifier_classificationscheme text,
hydroidentifier_localid text,
hydroidentifier_namespace text
);
-- Rapids feature table, keyed by localid.
CREATE TABLE hypw.rapids (
localid text,
beginlifespanversion timestamp,
beginlifespanversion_nilreason text,
beginlifespanversion_nil boolean,
geometry_owns boolean,
geometry_nilreason text,
geometry_remoteschema text,
geometry_nil boolean,
lod_denominator_nilreason text,
lod_denominator integer,
namedplace_fk integer REFERENCES hypw.rapids_geographicalname ON DELETE RESTRICT,
hydro_id_fk integer REFERENCES hypw.rapids_hydroid ON DELETE RESTRICT,
CONSTRAINT rapids_pkey PRIMARY KEY (localid)
);
SELECT ADDGEOMETRYCOLUMN('hypw', 'rapids','geometry','4258','GEOMETRY', 2);
CREATE INDEX rapids_geometry_idx ON hypw.rapids USING GIST (geometry);
ALTER TABLE hypw.rapids_geographicalname OWNER TO elf;
ALTER TABLE hypw.rapids_geographicalname_spelling OWNER TO elf;
ALTER TABLE hypw.rapids_hydroid OWNER TO elf;
ALTER TABLE hypw.rapids OWNER TO elf;
-- RiverBasin
-- Geographical name of a RiverBasin feature (flattened GML layout).
CREATE TABLE hypw.riverbasin_geographicalname (
id serial PRIMARY KEY,
nilreason text,
nil boolean,
language text,
language_nilreason text,
language_nil boolean,
nativeness_href text,
nativeness_nilreason text,
namestatus_href text,
namestatus_nilreason text,
sourceofname text,
sourceofname_nilreason text,
sourceofname_nil boolean,
pronunciation_nilreason text,
pronunciation_nil boolean,
pronunciation_pronunciationsoundlink text,
pronunciation_pronunciationsoundlink_nilreason text,
pronunciation_pronunciationsoundlink_nil boolean,
pronunciation_pronunciationipa text,
pronunciation_pronunciationipa_nilreason text,
pronunciation_pronunciationipa_nil boolean
);
-- Spelling variants of a name; cascades with the parent name row.
CREATE TABLE hypw.riverbasin_geographicalname_spelling (
id serial PRIMARY KEY,
parentfk integer NOT NULL REFERENCES hypw.riverbasin_geographicalname ON DELETE CASCADE,
spellingofname_text text,
spellingofname_script text,
spellingofname_script_nilreason text,
spellingofname_script_nil boolean,
spellingofname_transliterationscheme text,
spellingofname_transliterationscheme_nilreason text,
spellingofname_transliterationscheme_nil boolean
);
-- Hydrographic identifier lookup.
CREATE TABLE hypw.riverbasin_hydroid (
id serial PRIMARY KEY,
hydroidentifier_classificationscheme text,
hydroidentifier_localid text,
hydroidentifier_namespace text
);
-- RiverBasin feature table: area, basin order code and origin, keyed by
-- localid; same column layout as hypw.drainagebasin.
CREATE TABLE hypw.riverbasin (
localid text,
area numeric,
area_nilreason text,
area_uom text,
area_nil boolean,
basinorder_nilreason text,
basinorder_nil boolean,
basinorder_hydroordercode_order text,
basinorder_hydroordercode_orderscheme text,
basinorder_hydroordercode_scope text,
beginlifespanversion timestamp,
beginlifespanversion_nilreason text,
beginlifespanversion_nil boolean,
geometry_nilreason text,
geometry_remoteschema text,
geometry_owns boolean,
origin text,
origin_nilreason text,
origin_nil boolean,
namedplace_fk integer REFERENCES hypw.riverbasin_geographicalname ON DELETE RESTRICT,
hydro_id_fk integer REFERENCES hypw.riverbasin_hydroid ON DELETE RESTRICT,
CONSTRAINT riverbasin_pkey PRIMARY KEY (localid)
);
SELECT ADDGEOMETRYCOLUMN('hypw', 'riverbasin','geometry','4258','GEOMETRY', 2);
CREATE INDEX riverbasin_geometry_idx ON hypw.riverbasin USING GIST (geometry);
-- Sub-basins contained in this basin (xlink href, voidable).
CREATE TABLE hypw.riverbasin_containsbasin (
id serial PRIMARY KEY,
parentfk text NOT NULL REFERENCES hypw.riverbasin ON DELETE CASCADE,
href text,
nil boolean,
nilreason text
);
-- Outlet references of the basin (xlink href, voidable).
CREATE TABLE hypw.riverbasin_outlet (
id serial PRIMARY KEY,
parentfk text NOT NULL REFERENCES hypw.riverbasin ON DELETE CASCADE,
href text,
nil boolean,
nilreason text
);
ALTER TABLE hypw.riverbasin_geographicalname OWNER TO elf;
ALTER TABLE hypw.riverbasin_geographicalname_spelling OWNER TO elf;
ALTER TABLE hypw.riverbasin_hydroid OWNER TO elf;
ALTER TABLE hypw.riverbasin OWNER TO elf;
ALTER TABLE hypw.riverbasin_containsbasin OWNER TO elf;
ALTER TABLE hypw.riverbasin_outlet OWNER TO elf;
-- Shore
-- Geographical name of a Shore feature (flattened GML layout).
CREATE TABLE hypw.shore_geographicalname (
id serial PRIMARY KEY,
nilreason text,
nil boolean,
language text,
language_nilreason text,
language_nil boolean,
nativeness_href text,
nativeness_nilreason text,
namestatus_href text,
namestatus_nilreason text,
sourceofname text,
sourceofname_nilreason text,
sourceofname_nil boolean,
pronunciation_nilreason text,
pronunciation_nil boolean,
pronunciation_pronunciationsoundlink text,
pronunciation_pronunciationsoundlink_nilreason text,
pronunciation_pronunciationsoundlink_nil boolean,
pronunciation_pronunciationipa text,
pronunciation_pronunciationipa_nilreason text,
pronunciation_pronunciationipa_nil boolean
);
-- Spelling variants of a name; cascades with the parent name row.
CREATE TABLE hypw.shore_geographicalname_spelling (
id serial PRIMARY KEY,
parentfk integer NOT NULL REFERENCES hypw.shore_geographicalname ON DELETE CASCADE,
spellingofname_text text,
spellingofname_script text,
spellingofname_script_nilreason text,
spellingofname_script_nil boolean,
spellingofname_transliterationscheme text,
spellingofname_transliterationscheme_nilreason text,
spellingofname_transliterationscheme_nil boolean
);
-- Hydrographic identifier lookup.
CREATE TABLE hypw.shore_hydroid (
id serial PRIMARY KEY,
hydroidentifier_classificationscheme text,
hydroidentifier_localid text,
hydroidentifier_namespace text
);
-- Shore feature table: composition and delineation flags, keyed by localid.
CREATE TABLE hypw.shore (
localid text,
geometry_nilreason text,
geometry_remoteschema text,
geometry_owns boolean,
composition_owns boolean,
composition_nilreason text,
composition_remoteschema text,
composition_nil boolean,
delineationknown boolean,
delineationknown_nilreason text,
delineationknown_nil boolean,
beginlifespanversion timestamp,
beginlifespanversion_nilreason text,
beginlifespanversion_nil boolean,
namedplace_fk integer REFERENCES hypw.shore_geographicalname ON DELETE RESTRICT,
hydro_id_fk integer REFERENCES hypw.shore_hydroid ON DELETE RESTRICT,
CONSTRAINT shore_pkey PRIMARY KEY (localid)
);
SELECT ADDGEOMETRYCOLUMN('hypw', 'shore','geometry','4258','GEOMETRY', 2);
CREATE INDEX shore_geometry_idx ON hypw.shore USING GIST (geometry);
ALTER TABLE hypw.shore_geographicalname OWNER TO elf;
ALTER TABLE hypw.shore_geographicalname_spelling OWNER TO elf;
ALTER TABLE hypw.shore_hydroid OWNER TO elf;
ALTER TABLE hypw.shore OWNER TO elf;
-- Sluice
-- Geographical name of a Sluice feature (flattened GML layout).
CREATE TABLE hypw.sluice_geographicalname (
id serial PRIMARY KEY,
nilreason text,
nil boolean,
language text,
language_nilreason text,
language_nil boolean,
nativeness_href text,
nativeness_nilreason text,
namestatus_href text,
namestatus_nilreason text,
sourceofname text,
sourceofname_nilreason text,
sourceofname_nil boolean,
pronunciation_nilreason text,
pronunciation_nil boolean,
pronunciation_pronunciationsoundlink text,
pronunciation_pronunciationsoundlink_nilreason text,
pronunciation_pronunciationsoundlink_nil boolean,
pronunciation_pronunciationipa text,
pronunciation_pronunciationipa_nilreason text,
pronunciation_pronunciationipa_nil boolean
);
-- Spelling variants of a name; cascades with the parent name row.
CREATE TABLE hypw.sluice_geographicalname_spelling (
id serial PRIMARY KEY,
parentfk integer NOT NULL REFERENCES hypw.sluice_geographicalname ON DELETE CASCADE,
spellingofname_text text,
spellingofname_script text,
spellingofname_script_nilreason text,
spellingofname_script_nil boolean,
spellingofname_transliterationscheme text,
spellingofname_transliterationscheme_nilreason text,
spellingofname_transliterationscheme_nil boolean
);
-- Hydrographic identifier lookup.
CREATE TABLE hypw.sluice_hydroid (
id serial PRIMARY KEY,
hydroidentifier_classificationscheme text,
hydroidentifier_localid text,
hydroidentifier_namespace text
);
-- Sluice feature table, keyed by localid.
CREATE TABLE hypw.sluice (
localid text,
beginlifespanversion timestamp,
beginlifespanversion_nilreason text,
beginlifespanversion_nil boolean,
condition_href text,
condition_nil boolean,
condition_nilreason text,
geometry_owns boolean,
geometry_nilreason text,
geometry_remoteschema text,
geometry_nil boolean,
lod_denominator_nilreason text,
lod_denominator integer,
namedplace_fk integer REFERENCES hypw.sluice_geographicalname ON DELETE RESTRICT,
hydro_id_fk integer REFERENCES hypw.sluice_hydroid ON DELETE RESTRICT,
CONSTRAINT sluice_pkey PRIMARY KEY (localid)
);
SELECT ADDGEOMETRYCOLUMN('hypw', 'sluice','geometry','4258','GEOMETRY', 2);
CREATE INDEX sluice_geometry_idx ON hypw.sluice USING GIST (geometry);
ALTER TABLE hypw.sluice_geographicalname OWNER TO elf;
ALTER TABLE hypw.sluice_geographicalname_spelling OWNER TO elf;
ALTER TABLE hypw.sluice_hydroid OWNER TO elf;
ALTER TABLE hypw.sluice OWNER TO elf;
-- StandingWater
-- Geographical name of a StandingWater feature (flattened GML layout).
-- (The hypw.standingwater feature table itself follows below.)
CREATE TABLE hypw.standingwater_geographicalname (
id serial PRIMARY KEY,
nilreason text,
nil boolean,
language text,
language_nilreason text,
language_nil boolean,
nativeness_href text,
nativeness_nilreason text,
namestatus_href text,
namestatus_nilreason text,
sourceofname text,
sourceofname_nilreason text,
sourceofname_nil boolean,
pronunciation_nilreason text,
pronunciation_nil boolean,
pronunciation_pronunciationsoundlink text,
pronunciation_pronunciationsoundlink_nilreason text,
pronunciation_pronunciationsoundlink_nil boolean,
pronunciation_pronunciationipa text,
pronunciation_pronunciationipa_nilreason text,
pronunciation_pronunciationipa_nil boolean
);
-- Spelling variants of a name; cascades with the parent name row.
CREATE TABLE hypw.standingwater_geographicalname_spelling (
id serial PRIMARY KEY,
parentfk integer NOT NULL REFERENCES hypw.standingwater_geographicalname ON DELETE CASCADE,
spellingofname_text text,
spellingofname_script text,
spellingofname_script_nilreason text,
spellingofname_script_nil boolean,
spellingofname_transliterationscheme text,
spellingofname_transliterationscheme_nilreason text,
spellingofname_transliterationscheme_nil boolean
);
-- Hydrographic identifier lookup.
CREATE TABLE hypw.standingwater_hydroid (
id serial PRIMARY KEY,
hydroidentifier_classificationscheme text,
hydroidentifier_localid text,
hydroidentifier_namespace text
);
CREATE TABLE hypw.standingwater (
localid text,
beginlifespanversion timestamp,
beginlifespanversion_nilreason text,
beginlifespanversion_nil boolean,
geometry_owns boolean,
geometry_nilreason text,
geometry_remoteschema text,
lod_denominator_nilreason text,
lod_denominator integer,
localtype_xlink_href text,
localtype_xlink_title text,
localtype_nilreason text,
localtype_nil boolean,
localtype_localisedcharacterstring text,
localtype_localisedcharacterstring_id text,
localtype_localisedcharacterstring_locale text,
origin text,
origin_nilreason text,
origin_nil boolean,
persistence_href text,
persistence_nil boolean,
persistence_nilreason text,
tidal boolean,
tidal_nilreason text,
tidal_nil boolean,
elevation numeric,
elevation_uom text,
elevation_nil boolean,
elevation_nilreason text,
meandepth numeric,
meandepth_uom text,
meandepth_nil boolean,
meandepth_nilreason text,
surfacearea numeric,
surfacearea_uom text,
surfacearea_nil boolean,
surfacearea_nilreason text,
namedplace_fk integer REFERENCES hypw.standingwater_geographicalname ON DELETE RESTRICT,
hydro_id_fk integer REFERENCES hypw.standingwater_hydroid ON DELETE RESTRICT,
CONSTRAINT standingwater_pkey PRIMARY KEY (localid)
);
SELECT ADDGEOMETRYCOLUMN('hypw', 'standingwater','geometry','4258','GEOMETRY', 2);
CREATE INDEX standingwater_geometry_idx ON hypw.standingwater USING GIST (geometry);
CREATE TABLE hypw.standingwater_bank (
id serial PRIMARY KEY,
parentfk text NOT NULL REFERENCES hypw.standingwater ON DELETE CASCADE,
href text
);
CREATE TABLE hypw.standingwater_drainsbasin (
id serial PRIMARY KEY,
parentfk text NOT NULL REFERENCES hypw.standingwater ON DELETE CASCADE,
href text,
nil boolean,
nilreason text
);
CREATE TABLE hypw.standingwater_neighbour (
id serial PRIMARY KEY,
parentfk text NOT NULL REFERENCES hypw.standingwater ON DELETE CASCADE,
href text
);
ALTER TABLE hypw.standingwater_geographicalname OWNER TO elf;
ALTER TABLE hypw.standingwater_geographicalname_spelling OWNER TO elf;
ALTER TABLE hypw.standingwater_hydroid OWNER TO elf;
ALTER TABLE hypw.standingwater OWNER TO elf;
ALTER TABLE hypw.standingwater_bank OWNER TO elf;
ALTER TABLE hypw.standingwater_drainsbasin OWNER TO elf;
ALTER TABLE hypw.standingwater_neighbour OWNER TO elf;
-- Watercourse
-- GeographicalName properties for watercourses; the *_nil / *_nilreason
-- column pairs appear to mirror GML nillable attributes (confirm against schema).
CREATE TABLE hypw.watercourse_geographicalname (
id serial PRIMARY KEY,
nilreason text,
nil boolean,
language text,
language_nilreason text,
language_nil boolean,
nativeness_href text,
nativeness_nilreason text,
namestatus_href text,
namestatus_nilreason text,
sourceofname text,
sourceofname_nilreason text,
sourceofname_nil boolean,
pronunciation_nilreason text,
pronunciation_nil boolean,
pronunciation_pronunciationsoundlink text,
pronunciation_pronunciationsoundlink_nilreason text,
pronunciation_pronunciationsoundlink_nil boolean,
pronunciation_pronunciationipa text,
pronunciation_pronunciationipa_nilreason text,
pronunciation_pronunciationipa_nil boolean
);
-- One row per spelling variant of a name; removed with its parent name.
CREATE TABLE hypw.watercourse_geographicalname_spelling (
id serial PRIMARY KEY,
parentfk integer NOT NULL REFERENCES hypw.watercourse_geographicalname ON DELETE CASCADE,
spellingofname_text text,
spellingofname_script text,
spellingofname_script_nilreason text,
spellingofname_script_nil boolean,
spellingofname_transliterationscheme text,
spellingofname_transliterationscheme_nilreason text,
spellingofname_transliterationscheme_nil boolean
);
-- HydroIdentifier records referenced from the main watercourse table.
CREATE TABLE hypw.watercourse_hydroid (
id serial PRIMARY KEY,
hydroidentifier_classificationscheme text,
hydroidentifier_localid text,
hydroidentifier_namespace text
);
-- Watercourse features; localid is the feature's local identifier and primary key.
-- length/width units travel in the companion *_uom columns.
CREATE TABLE hypw.watercourse (
localid text,
beginlifespanversion timestamp,
beginlifespanversion_nilreason text,
beginlifespanversion_nil boolean,
geometry_owns boolean,
geometry_nilreason text,
geometry_remoteschema text,
lod_denominator_nilreason text,
lod_denominator integer,
localtype_xlink_href text,
localtype_xlink_title text,
localtype_nilreason text,
localtype_nil boolean,
localtype_localisedcharacterstring text,
localtype_localisedcharacterstring_id text,
localtype_localisedcharacterstring_locale text,
origin text,
origin_nilreason text,
origin_nil boolean,
persistence_href text,
persistence_nil boolean,
persistence_nilreason text,
tidal boolean,
tidal_nilreason text,
tidal_nil boolean,
condition_href text,
delineationknown boolean,
delineationknown_nilreason text,
delineationknown_nil boolean,
length numeric,
length_nilreason text,
length_uom text,
length_nil boolean,
level text,
level_nilreason text,
level_nil boolean,
streamorder_nilreason text,
streamorder_nil boolean,
streamorder_hydroordercode_order text,
streamorder_hydroordercode_orderscheme text,
streamorder_hydroordercode_scope text,
width_nilreason text,
width_nil boolean,
width_widthrange_lower numeric,
width_widthrange_lower_uom text,
width_widthrange_upper numeric,
width_widthrange_upper_uom text,
namedplace_fk integer REFERENCES hypw.watercourse_geographicalname ON DELETE RESTRICT,
hydro_id_fk integer REFERENCES hypw.watercourse_hydroid ON DELETE RESTRICT,
CONSTRAINT watercourse_pkey PRIMARY KEY (localid)
);
-- Register the geometry column through PostGIS (SRID 4258, 2-D, generic GEOMETRY)
-- and add a GiST spatial index on it.
SELECT ADDGEOMETRYCOLUMN('hypw', 'watercourse','geometry','4258','GEOMETRY', 2);
CREATE INDEX watercourse_geometry_idx ON hypw.watercourse USING GIST (geometry);
-- Multi-valued xlink reference tables (href targets are resolved externally).
CREATE TABLE hypw.watercourse_bank (
id serial PRIMARY KEY,
parentfk text NOT NULL REFERENCES hypw.watercourse ON DELETE CASCADE,
href text
);
CREATE TABLE hypw.watercourse_drainsbasin (
id serial PRIMARY KEY,
parentfk text NOT NULL REFERENCES hypw.watercourse ON DELETE CASCADE,
href text,
nil boolean,
nilreason text
);
CREATE TABLE hypw.watercourse_neighbour (
id serial PRIMARY KEY,
parentfk text NOT NULL REFERENCES hypw.watercourse ON DELETE CASCADE,
href text
);
-- Transfer ownership of the watercourse tables to the elf role.
ALTER TABLE hypw.watercourse_geographicalname OWNER TO elf;
ALTER TABLE hypw.watercourse_geographicalname_spelling OWNER TO elf;
ALTER TABLE hypw.watercourse_hydroid OWNER TO elf;
ALTER TABLE hypw.watercourse OWNER TO elf;
ALTER TABLE hypw.watercourse_bank OWNER TO elf;
ALTER TABLE hypw.watercourse_drainsbasin OWNER TO elf;
ALTER TABLE hypw.watercourse_neighbour OWNER TO elf;
-- Wetland
-- GeographicalName properties for wetlands; the *_nil / *_nilreason
-- column pairs appear to mirror GML nillable attributes (confirm against schema).
CREATE TABLE hypw.wetland_geographicalname (
id serial PRIMARY KEY,
nilreason text,
nil boolean,
language text,
language_nilreason text,
language_nil boolean,
nativeness_href text,
nativeness_nilreason text,
namestatus_href text,
namestatus_nilreason text,
sourceofname text,
sourceofname_nilreason text,
sourceofname_nil boolean,
pronunciation_nilreason text,
pronunciation_nil boolean,
pronunciation_pronunciationsoundlink text,
pronunciation_pronunciationsoundlink_nilreason text,
pronunciation_pronunciationsoundlink_nil boolean,
pronunciation_pronunciationipa text,
pronunciation_pronunciationipa_nilreason text,
pronunciation_pronunciationipa_nil boolean
);
-- One row per spelling variant of a name; removed with its parent name.
CREATE TABLE hypw.wetland_geographicalname_spelling (
id serial PRIMARY KEY,
parentfk integer NOT NULL REFERENCES hypw.wetland_geographicalname ON DELETE CASCADE,
spellingofname_text text,
spellingofname_script text,
spellingofname_script_nilreason text,
spellingofname_script_nil boolean,
spellingofname_transliterationscheme text,
spellingofname_transliterationscheme_nilreason text,
spellingofname_transliterationscheme_nil boolean
);
-- HydroIdentifier records referenced from the main wetland table.
CREATE TABLE hypw.wetland_hydroid (
id serial PRIMARY KEY,
hydroidentifier_classificationscheme text,
hydroidentifier_localid text,
hydroidentifier_namespace text
);
-- Wetland features; localid is the feature's local identifier and primary key.
CREATE TABLE hypw.wetland (
localid text,
geometry_nilreason text,
geometry_remoteschema text,
geometry_owns boolean,
localtype_xlink_href text,
localtype_xlink_title text,
localtype_nilreason text,
localtype_nil boolean,
localtype_localisedcharacterstring text,
localtype_localisedcharacterstring_id text,
localtype_localisedcharacterstring_locale text,
tidal boolean,
tidal_nilreason text,
tidal_nil boolean,
beginlifespanversion timestamp,
beginlifespanversion_nilreason text,
beginlifespanversion_nil boolean,
namedplace_fk integer REFERENCES hypw.wetland_geographicalname ON DELETE RESTRICT,
hydro_id_fk integer REFERENCES hypw.wetland_hydroid ON DELETE RESTRICT,
CONSTRAINT wetland_pkey PRIMARY KEY (localid)
);
-- Register the geometry column through PostGIS (SRID 4258, 2-D, generic GEOMETRY)
-- and add a GiST spatial index on it.
SELECT ADDGEOMETRYCOLUMN('hypw', 'wetland','geometry','4258','GEOMETRY', 2);
CREATE INDEX wetland_geometry_idx ON hypw.wetland USING GIST (geometry);
-- Transfer ownership of the wetland tables to the elf role.
ALTER TABLE hypw.wetland_geographicalname OWNER TO elf;
ALTER TABLE hypw.wetland_geographicalname_spelling OWNER TO elf;
ALTER TABLE hypw.wetland_hydroid OWNER TO elf;
ALTER TABLE hypw.wetland OWNER TO elf;
-- Embankment
-- GeographicalName properties for embankments; the *_nil / *_nilreason
-- column pairs appear to mirror GML nillable attributes (confirm against schema).
CREATE TABLE hypw.embankment_geographicalname (
id serial PRIMARY KEY,
nilreason text,
nil boolean,
language text,
language_nilreason text,
language_nil boolean,
nativeness_href text,
nativeness_nilreason text,
namestatus_href text,
namestatus_nilreason text,
sourceofname text,
sourceofname_nilreason text,
sourceofname_nil boolean,
pronunciation_nilreason text,
pronunciation_nil boolean,
pronunciation_pronunciationsoundlink text,
pronunciation_pronunciationsoundlink_nilreason text,
pronunciation_pronunciationsoundlink_nil boolean,
pronunciation_pronunciationipa text,
pronunciation_pronunciationipa_nilreason text,
pronunciation_pronunciationipa_nil boolean
);
-- One row per spelling variant of a name; removed with its parent name.
CREATE TABLE hypw.embankment_geographicalname_spelling (
id serial PRIMARY KEY,
parentfk integer NOT NULL REFERENCES hypw.embankment_geographicalname ON DELETE CASCADE,
spellingofname_text text,
spellingofname_script text,
spellingofname_script_nilreason text,
spellingofname_script_nil boolean,
spellingofname_transliterationscheme text,
spellingofname_transliterationscheme_nilreason text,
spellingofname_transliterationscheme_nil boolean
);
-- HydroIdentifier records referenced from the main embankment table.
CREATE TABLE hypw.embankment_hydroid (
id serial PRIMARY KEY,
hydroidentifier_classificationscheme text,
hydroidentifier_localid text,
hydroidentifier_namespace text
);
-- Embankment features; localid is the feature's local identifier and primary key.
-- predominantfeatureheight units travel in predominantfeatureheight_uom.
CREATE TABLE hypw.embankment (
localid text,
beginlifespanversion timestamp,
beginlifespanversion_nilreason text,
beginlifespanversion_nil boolean,
condition_href text,
condition_nil boolean,
condition_nilreason text,
geometry_owns boolean,
geometry_nilreason text,
geometry_remoteschema text,
geometry_nil boolean,
lod_denominator_nilreason text,
lod_denominator integer,
embankmenttype_href text,
embankmenttype_nilreason text,
predominantfeatureheight numeric,
predominantfeatureheight_uom text,
waterleveleffect_href text,
waterleveleffect_nilreason text,
namedplace_fk integer REFERENCES hypw.embankment_geographicalname ON DELETE RESTRICT,
hydro_id_fk integer REFERENCES hypw.embankment_hydroid ON DELETE RESTRICT,
CONSTRAINT embankment_pkey PRIMARY KEY (localid)
);
-- Register the geometry column through PostGIS (SRID 4258, 2-D, generic GEOMETRY)
-- and add a GiST spatial index on it.
SELECT ADDGEOMETRYCOLUMN('hypw', 'embankment','geometry','4258','GEOMETRY', 2);
CREATE INDEX embankment_geometry_idx ON hypw.embankment USING GIST (geometry);
-- Transfer ownership of the embankment tables to the elf role.
ALTER TABLE hypw.embankment_geographicalname OWNER TO elf;
ALTER TABLE hypw.embankment_geographicalname_spelling OWNER TO elf;
ALTER TABLE hypw.embankment_hydroid OWNER TO elf;
ALTER TABLE hypw.embankment OWNER TO elf;
-- Island
-- GeographicalName properties for islands; the *_nil / *_nilreason
-- column pairs appear to mirror GML nillable attributes (confirm against schema).
CREATE TABLE hypw.island_geographicalname (
id serial PRIMARY KEY,
nilreason text,
nil boolean,
language text,
language_nilreason text,
language_nil boolean,
nativeness_href text,
nativeness_nilreason text,
namestatus_href text,
namestatus_nilreason text,
sourceofname text,
sourceofname_nilreason text,
sourceofname_nil boolean,
pronunciation_nilreason text,
pronunciation_nil boolean,
pronunciation_pronunciationsoundlink text,
pronunciation_pronunciationsoundlink_nilreason text,
pronunciation_pronunciationsoundlink_nil boolean,
pronunciation_pronunciationipa text,
pronunciation_pronunciationipa_nilreason text,
pronunciation_pronunciationipa_nil boolean
);
-- One row per spelling variant of a name; removed with its parent name.
CREATE TABLE hypw.island_geographicalname_spelling (
id serial PRIMARY KEY,
parentfk integer NOT NULL REFERENCES hypw.island_geographicalname ON DELETE CASCADE,
spellingofname_text text,
spellingofname_script text,
spellingofname_script_nilreason text,
spellingofname_script_nil boolean,
spellingofname_transliterationscheme text,
spellingofname_transliterationscheme_nilreason text,
spellingofname_transliterationscheme_nil boolean
);
-- HydroIdentifier records referenced from the main island table.
CREATE TABLE hypw.island_hydroid (
id serial PRIMARY KEY,
hydroidentifier_classificationscheme text,
hydroidentifier_localid text,
hydroidentifier_namespace text
);
-- Island features; localid is the feature's local identifier and primary key.
CREATE TABLE hypw.island (
localid text,
geometry_nilreason text,
geometry_remoteschema text,
geometry_owns boolean,
beginlifespanversion timestamp,
beginlifespanversion_nilreason text,
beginlifespanversion_nil boolean,
namedplace_fk integer REFERENCES hypw.island_geographicalname ON DELETE RESTRICT,
hydro_id_fk integer REFERENCES hypw.island_hydroid ON DELETE RESTRICT,
CONSTRAINT island_pkey PRIMARY KEY (localid)
);
-- Register the geometry column through PostGIS (SRID 4258, 2-D, generic GEOMETRY)
-- and add a GiST spatial index on it.
SELECT ADDGEOMETRYCOLUMN('hypw', 'island','geometry','4258','GEOMETRY', 2);
CREATE INDEX island_geometry_idx ON hypw.island USING GIST (geometry);
-- Transfer ownership of the island tables to the elf role.
ALTER TABLE hypw.island_geographicalname OWNER TO elf;
ALTER TABLE hypw.island_geographicalname_spelling OWNER TO elf;
ALTER TABLE hypw.island_hydroid OWNER TO elf;
ALTER TABLE hypw.island OWNER TO elf;
-- LandWaterBoundary
-- LandWaterBoundary features; unlike the other feature types there are no
-- name/hydroid side tables here. localid is the primary key.
CREATE TABLE hypw.landwaterboundary (
localid text,
beginlifespanversion timestamp,
beginlifespanversion_nilreason text,
beginlifespanversion_nil boolean,
geometry_nilreason text,
geometry_remoteschema text,
geometry_owns boolean,
origin text,
origin_nilreason text,
origin_nil boolean,
waterlevelcategory_href text,
waterlevelcategory_nil boolean,
waterlevelcategory_nilreason text,
CONSTRAINT landwaterboundary_pkey PRIMARY KEY (localid)
);
-- Register the geometry column through PostGIS (SRID 4258, 2-D, generic GEOMETRY)
-- and add a GiST spatial index on it.
SELECT ADDGEOMETRYCOLUMN('hypw', 'landwaterboundary','geometry','4258','GEOMETRY', 2);
CREATE INDEX landwaterboundary_geometry_idx ON hypw.landwaterboundary USING GIST (geometry);
-- Multi-valued xlink references from a boundary to the features it delimits.
CREATE TABLE hypw.landwaterboundary_delimit (
id serial PRIMARY KEY,
parentfk text NOT NULL REFERENCES hypw.landwaterboundary ON DELETE CASCADE,
waterleveleffect_href text
);
-- Transfer ownership of the landwaterboundary tables to the elf role.
ALTER TABLE hypw.landwaterboundary OWNER TO elf;
ALTER TABLE hypw.landwaterboundary_delimit OWNER TO elf;
-- ShorelineConstruction
-- GeographicalName properties for shoreline constructions; the *_nil /
-- *_nilreason column pairs appear to mirror GML nillable attributes
-- (confirm against schema).
CREATE TABLE hypw.shorelineconstruction_geographicalname (
id serial PRIMARY KEY,
nilreason text,
nil boolean,
language text,
language_nilreason text,
language_nil boolean,
nativeness_href text,
nativeness_nilreason text,
namestatus_href text,
namestatus_nilreason text,
sourceofname text,
sourceofname_nilreason text,
sourceofname_nil boolean,
pronunciation_nilreason text,
pronunciation_nil boolean,
pronunciation_pronunciationsoundlink text,
pronunciation_pronunciationsoundlink_nilreason text,
pronunciation_pronunciationsoundlink_nil boolean,
pronunciation_pronunciationipa text,
pronunciation_pronunciationipa_nilreason text,
pronunciation_pronunciationipa_nil boolean
);
-- One row per spelling variant of a name; removed with its parent name.
CREATE TABLE hypw.shorelineconstruction_geographicalname_spelling (
id serial PRIMARY KEY,
parentfk integer NOT NULL REFERENCES hypw.shorelineconstruction_geographicalname ON DELETE CASCADE,
spellingofname_text text,
spellingofname_script text,
spellingofname_script_nilreason text,
spellingofname_script_nil boolean,
spellingofname_transliterationscheme text,
spellingofname_transliterationscheme_nilreason text,
spellingofname_transliterationscheme_nil boolean
);
-- HydroIdentifier records referenced from the main shorelineconstruction table.
CREATE TABLE hypw.shorelineconstruction_hydroid (
id serial PRIMARY KEY,
hydroidentifier_classificationscheme text,
hydroidentifier_localid text,
hydroidentifier_namespace text
);
-- ShorelineConstruction features; localid is the feature's local identifier
-- and primary key.
CREATE TABLE hypw.shorelineconstruction (
localid text,
beginlifespanversion timestamp,
beginlifespanversion_nilreason text,
beginlifespanversion_nil boolean,
condition_href text,
condition_nil boolean,
condition_nilreason text,
geometry_owns boolean,
geometry_nilreason text,
geometry_remoteschema text,
geometry_nil boolean,
lod_denominator_nilreason text,
lod_denominator integer,
shorelineconstructiontype_href text,
shorelineconstructiontype_nilreason text,
namedplace_fk integer REFERENCES hypw.shorelineconstruction_geographicalname ON DELETE RESTRICT,
hydro_id_fk integer REFERENCES hypw.shorelineconstruction_hydroid ON DELETE RESTRICT,
CONSTRAINT shorelineconstruction_pkey PRIMARY KEY (localid)
);
-- Register the geometry column through PostGIS (SRID 4258, 2-D, generic GEOMETRY)
-- and add a GiST spatial index on it.
SELECT ADDGEOMETRYCOLUMN('hypw', 'shorelineconstruction','geometry','4258','GEOMETRY', 2);
CREATE INDEX shorelineconstruction_geometry_idx ON hypw.shorelineconstruction USING GIST (geometry);
-- Transfer ownership of the shorelineconstruction tables to the elf role.
ALTER TABLE hypw.shorelineconstruction_geographicalname OWNER TO elf;
ALTER TABLE hypw.shorelineconstruction_geographicalname_spelling OWNER TO elf;
ALTER TABLE hypw.shorelineconstruction_hydroid OWNER TO elf;
ALTER TABLE hypw.shorelineconstruction OWNER TO elf;
|
-- DO288/labs/external-service/drop-todo.sql
-- Tear down the "todo" sample database and its application accounts.
USE mysql;
DROP DATABASE IF EXISTS todo;
-- IF EXISTS keeps the whole script idempotent, matching the DROP DATABASE
-- above (supported since MySQL 5.7 / MariaDB 10.1.3).
DROP USER IF EXISTS 'todoapp'@'localhost';
DROP USER IF EXISTS 'todoapp'@'%';
|
-- Seed rows; statements are now terminated so the file can run as a batch.
INSERT INTO users2 (email) VALUES ('<EMAIL>');
INSERT INTO users2 (email) VALUES ('<EMAIL>');
INSERT INTO shop_list (name, crossed) VALUES ('nakupni kosik', FALSE);
|
====
---- DATASET
functional
---- BASE_TABLE_NAME
alltypes
---- PARTITION_COLUMNS
year int
month int
---- COLUMNS
id int COMMENT 'Add a comment'
bool_col boolean
tinyint_col tinyint
smallint_col smallint
int_col int
bigint_col bigint
float_col float
double_col double
date_string_col string
string_col string
timestamp_col timestamp
---- ROW_FORMAT
delimited fields terminated by ',' escaped by '\\'
---- ALTER
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2009, month=1);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2009, month=2);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2009, month=3);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2009, month=4);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2009, month=5);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2009, month=6);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2009, month=7);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2009, month=8);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2009, month=9);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2009, month=10);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2009, month=11);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2009, month=12);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=2);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=3);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=4);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=5);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=6);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=7);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=8);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=9);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=10);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=11);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=12);
---- DEPENDENT_LOAD
-- Repopulate this file format's table from the base text table, preserving
-- the (year, month) partitioning via dynamic partition insert.
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} partition (year, month)
SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col, double_col, date_string_col, string_col, timestamp_col, year, month
FROM {db_name}.{table_name};
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypes/090101.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2009, month=1);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypes/090201.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2009, month=2);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypes/090301.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2009, month=3);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypes/090401.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2009, month=4);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypes/090501.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2009, month=5);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypes/090601.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2009, month=6);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypes/090701.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2009, month=7);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypes/090801.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2009, month=8);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypes/090901.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2009, month=9);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypes/091001.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2009, month=10);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypes/091101.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2009, month=11);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypes/091201.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2009, month=12);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypes/100101.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=1);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypes/100201.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=2);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypes/100301.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=3);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypes/100401.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=4);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypes/100501.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=5);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypes/100601.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=6);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypes/100701.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=7);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypes/100801.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=8);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypes/100901.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=9);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypes/101001.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=10);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypes/101101.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=11);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypes/101201.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=12);
---- CREATE_KUDU
-- Rebuild the Kudu flavor from scratch; Kudu has no partition columns, so
-- year/month become regular columns and rows are hash-partitioned on id.
DROP TABLE IF EXISTS {db_name}{db_suffix}.{table_name};
CREATE TABLE {db_name}{db_suffix}.{table_name} (
id INT PRIMARY KEY,
bool_col BOOLEAN,
tinyint_col TINYINT,
smallint_col SMALLINT,
int_col INT,
bigint_col BIGINT,
float_col FLOAT,
double_col DOUBLE,
date_string_col STRING,
string_col STRING,
timestamp_col TIMESTAMP,
year INT,
month INT
)
PARTITION BY HASH (id) PARTITIONS 3 STORED AS KUDU;
---- DEPENDENT_LOAD_KUDU
-- Copy all rows from the base text table into the Kudu table.
INSERT into TABLE {db_name}{db_suffix}.{table_name}
SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col, double_col, date_string_col, string_col,
timestamp_col, year, month
FROM {db_name}.{table_name};
---- DATASET
functional
---- BASE_TABLE_NAME
alltypesnopart
---- COLUMNS
id int
bool_col boolean
tinyint_col tinyint
smallint_col smallint
int_col int
bigint_col bigint
float_col float
double_col double
date_string_col string
string_col string
timestamp_col timestamp
---- ROW_FORMAT
delimited fields terminated by ',' escaped by '\\'
====
---- DATASET
functional
---- BASE_TABLE_NAME
alltypessmall
---- PARTITION_COLUMNS
year int
month int
---- COLUMNS
id int
bool_col boolean
tinyint_col tinyint
smallint_col smallint
int_col int
bigint_col bigint
float_col float
double_col double
date_string_col string
string_col string
timestamp_col timestamp
---- ROW_FORMAT
delimited fields terminated by ',' escaped by '\\'
---- HBASE_REGION_SPLITS
'1','3','5','7','9'
---- ALTER
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2009, month=1);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2009, month=2);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2009, month=3);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2009, month=4);
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} partition (year, month)
SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col, double_col, date_string_col, string_col, timestamp_col, year, month
FROM {db_name}.{table_name};
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypesSmall/090101.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2009, month=1);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypesSmall/090201.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2009, month=2);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypesSmall/090301.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2009, month=3);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypesSmall/090401.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2009, month=4);
---- CREATE_KUDU
DROP TABLE IF EXISTS {db_name}{db_suffix}.{table_name};
CREATE TABLE {db_name}{db_suffix}.{table_name} (
id INT PRIMARY KEY,
bool_col BOOLEAN,
tinyint_col TINYINT,
smallint_col SMALLINT,
int_col INT,
bigint_col BIGINT,
float_col FLOAT,
double_col DOUBLE,
date_string_col STRING,
string_col STRING,
timestamp_col TIMESTAMP,
year INT,
month INT
)
PARTITION BY HASH (id) PARTITIONS 3 STORED AS KUDU;
---- DEPENDENT_LOAD_KUDU
INSERT into TABLE {db_name}{db_suffix}.{table_name}
SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col, double_col, date_string_col, string_col,
timestamp_col, year, month
FROM {db_name}.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
alltypestiny
---- PARTITION_COLUMNS
year int
month int
---- COLUMNS
id int
bool_col boolean
tinyint_col tinyint
smallint_col smallint
int_col int
bigint_col bigint
float_col float
double_col double
date_string_col string
string_col string
timestamp_col timestamp
---- ROW_FORMAT
delimited fields terminated by ',' escaped by '\\'
---- ALTER
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2009, month=1);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2009, month=2);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2009, month=3);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2009, month=4);
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} partition (year, month)
SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col, double_col, date_string_col, string_col, timestamp_col, year, month
FROM {db_name}.{table_name};
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypesTiny/090101.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2009, month=1);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypesTiny/090201.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2009, month=2);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypesTiny/090301.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2009, month=3);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypesTiny/090401.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2009, month=4);
---- CREATE_KUDU
DROP TABLE IF EXISTS {db_name}{db_suffix}.{table_name};
CREATE TABLE {db_name}{db_suffix}.{table_name} (
id INT PRIMARY KEY,
bool_col BOOLEAN,
tinyint_col TINYINT,
smallint_col SMALLINT,
int_col INT,
bigint_col BIGINT,
float_col FLOAT,
double_col DOUBLE,
date_string_col STRING,
string_col STRING,
timestamp_col TIMESTAMP,
year INT,
month INT
)
PARTITION BY HASH (id) PARTITIONS 3 STORED AS KUDU;
---- DEPENDENT_LOAD_KUDU
INSERT INTO TABLE {db_name}{db_suffix}.{table_name}
SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col, double_col, date_string_col, string_col,
timestamp_col, year, month
FROM {db_name}.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
alltypesinsert
---- CREATE
CREATE TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name}
LIKE {db_name}{db_suffix}.alltypes STORED AS {file_format};
====
---- DATASET
functional
---- BASE_TABLE_NAME
alltypesnopart_insert
---- CREATE
CREATE TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name}
LIKE {db_name}{db_suffix}.alltypesnopart STORED AS {file_format};
====
---- DATASET
functional
---- BASE_TABLE_NAME
insert_overwrite_nopart
---- CREATE
CREATE TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (col1 int)
STORED AS {file_format};
====
---- DATASET
functional
---- BASE_TABLE_NAME
insert_overwrite_partitioned
---- CREATE
CREATE TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (col1 int)
PARTITIONED BY (col2 int)
STORED AS {file_format};
====
---- DATASET
functional
---- BASE_TABLE_NAME
insert_string_partitioned
---- CREATE
CREATE TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (s1 string)
PARTITIONED BY (s2 string)
STORED AS {file_format};
====
---- DATASET
functional
---- BASE_TABLE_NAME
alltypeserror
---- CREATE
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
id int,
bool_col boolean,
tinyint_col tinyint,
smallint_col smallint,
int_col int,
bigint_col bigint,
float_col float,
double_col double,
date_string_col string,
string_col string,
timestamp_col timestamp)
partitioned by (year int, month int)
row format delimited fields terminated by ',' escaped by '\\'
stored as {file_format}
LOCATION '{hdfs_location}';
USE {db_name}{db_suffix};
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION (year=2009, month=1);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION (year=2009, month=2);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION (year=2009, month=3);
-- Create external temp table with desired file format with same data file location
-- Tmp tables must not specify an escape character we don't want any
-- data transformation to happen when inserting it into tmp tables.
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name}_tmp (
id STRING,
bool_col STRING,
tinyint_col STRING,
smallint_col STRING,
int_col STRING,
bigint_col STRING,
float_col STRING,
double_col STRING,
date_string_col STRING,
string_col STRING,
timestamp_col STRING)
PARTITIONED BY (year INT, month INT)
ROW FORMAT DELIMITED
FIELDS TERMINATED BY ','
STORED AS {file_format}
LOCATION '{hdfs_location}';
-- Make metastore aware of the partition directories for the temp table
ALTER TABLE {table_name}_tmp ADD IF NOT EXISTS PARTITION (year=2009, month=1);
ALTER TABLE {table_name}_tmp ADD IF NOT EXISTS PARTITION (year=2009, month=2);
ALTER TABLE {table_name}_tmp ADD IF NOT EXISTS PARTITION (year=2009, month=3);
---- TABLE_PROPERTIES
transactional=false
---- DEPENDENT_LOAD
USE {db_name}{db_suffix};
-- Step 4: Stream the data from tmp text table to desired format tmp table
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name}_tmp PARTITION (year, month)
SELECT * FROM {db_name}.{table_name}_tmp;
-- Cleanup the temp table
DROP TABLE IF EXISTS {db_name}{db_suffix}.{table_name}_tmp;
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/AllTypesError/0901.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2009, month=1);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/AllTypesError/0902.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2009, month=2);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/AllTypesError/0903.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2009, month=3);
====
---- DATASET
functional
---- BASE_TABLE_NAME
hbasealltypeserror
---- CREATE_HIVE
-- HBase-backed counterpart of alltypeserror: the first column maps to the
-- HBase row key (':key'); all other columns live in the 'd' column family.
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
id int,
bool_col boolean,
tinyint_col tinyint,
smallint_col smallint,
int_col int,
bigint_col bigint,
float_col float,
double_col double,
date_string_col string,
string_col string,
timestamp_col timestamp)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES (
"hbase.columns.mapping" =
":key,d:bool_col,d:tinyint_col,d:smallint_col,d:int_col,d:bigint_col,d:float_col,d:double_col,d:date_string_col,d:string_col,d:timestamp_col"
)
TBLPROPERTIES("hbase.table.name" = "functional_hbase.hbasealltypeserror");
====
---- DATASET
functional
---- BASE_TABLE_NAME
alltypes_promoted
---- PARTITION_COLUMNS
year int
month int
---- COLUMNS
id int COMMENT 'Add a comment'
bool_col boolean
tinyint_col tinyint
smallint_col smallint
int_col int
bigint_col bigint
float_col float
double_col double
date_string_col string
string_col string
timestamp_col timestamp
---- DEPENDENT_LOAD_HIVE
-- The table starts out non-transactional (TABLE_PROPERTIES below sets
-- transactional=false). After loading, it is "promoted" to a managed,
-- fully transactional (ACID) table.
INSERT INTO TABLE {db_name}{db_suffix}.{table_name} SELECT * FROM {db_name}{db_suffix}.alltypes;
ALTER TABLE {db_name}{db_suffix}.{table_name} SET tblproperties('EXTERNAL'='FALSE','transactional'='true');
---- TABLE_PROPERTIES
transactional=false
====
---- DATASET
functional
---- BASE_TABLE_NAME
hbasecolumnfamilies
---- HBASE_COLUMN_FAMILIES
0
1
2
3
d
---- CREATE_HIVE
-- Create an HBase table with multiple column families:
-- bool/tinyint/smallint/int columns are spread over families 0-3; the
-- remaining columns live in the 'd' family.
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
id int,
bool_col boolean,
tinyint_col tinyint,
smallint_col smallint,
int_col int,
bigint_col bigint,
float_col float,
double_col double,
date_string_col string,
string_col string,
timestamp_col timestamp)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES (
"hbase.columns.mapping" =
":key,0:bool_col,1:tinyint_col,2:smallint_col,3:int_col,d:bigint_col,d:float_col,d:double_col,d:date_string_col,d:string_col,d:timestamp_col"
)
TBLPROPERTIES("hbase.table.name" = "functional_hbase.hbasecolumnfamilies");
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name}
SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col, double_col,
date_string_col, string_col, timestamp_col FROM functional.alltypestiny;
====
---- DATASET
functional
---- BASE_TABLE_NAME
alltypeserrornonulls
---- CREATE
-- External partitioned table over the AllTypesErrorNoNulls data files (loaded below).
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
id int,
bool_col boolean,
tinyint_col tinyint,
smallint_col smallint,
int_col int,
bigint_col bigint,
float_col float,
double_col double,
date_string_col string,
string_col string,
timestamp_col timestamp)
partitioned by (year int, month int)
row format delimited fields terminated by ',' escaped by '\\'
stored as {file_format}
LOCATION '{hdfs_location}';
-- Make metastore aware of the new partitions directories
-- ALTER does not take a fully qualified name.
USE {db_name}{db_suffix};
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION (year=2009, month=1);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION (year=2009, month=2);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION (year=2009, month=3);
-- Create external temp table with desired file format with same data file location
-- NOTE(review): unlike the alltypeserror tmp table, this one keeps ESCAPED BY
-- '\\' — confirm the difference is intentional.
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name}_tmp (
id STRING,
bool_col STRING,
tinyint_col STRING,
smallint_col STRING,
int_col STRING,
bigint_col STRING,
float_col STRING,
double_col STRING,
date_string_col STRING,
string_col STRING,
timestamp_col STRING)
PARTITIONED BY (year INT, month INT)
ROW FORMAT DELIMITED
FIELDS TERMINATED BY ','
ESCAPED BY '\\'
STORED AS {file_format}
LOCATION '{hdfs_location}';
-- Make metastore aware of the partition directories
USE {db_name}{db_suffix};
ALTER TABLE {table_name}_tmp ADD IF NOT EXISTS PARTITION (year=2009, month=1);
ALTER TABLE {table_name}_tmp ADD IF NOT EXISTS PARTITION (year=2009, month=2);
ALTER TABLE {table_name}_tmp ADD IF NOT EXISTS PARTITION (year=2009, month=3);
---- TABLE_PROPERTIES
transactional=false
---- DEPENDENT_LOAD
USE {db_name}{db_suffix};
-- Stream the data from the tmp text table into the desired-format tmp table.
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name}_tmp PARTITION (year, month)
SELECT * FROM {db_name}.{table_name}_tmp;
-- Cleanup the temp table
DROP TABLE IF EXISTS {db_name}{db_suffix}.{table_name}_tmp;
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/AllTypesErrorNoNulls/0901.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2009, month=1);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/AllTypesErrorNoNulls/0902.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2009, month=2);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/AllTypesErrorNoNulls/0903.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2009, month=3);
====
---- DATASET
functional
---- BASE_TABLE_NAME
hbasealltypeserrornonulls
---- CREATE_HIVE
-- HBase-backed counterpart of alltypeserrornonulls: the first column maps to
-- the HBase row key (':key'); all other columns live in the 'd' column family.
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
id int,
bool_col boolean,
tinyint_col tinyint,
smallint_col smallint,
int_col int,
bigint_col bigint,
float_col float,
double_col double,
date_string_col string,
string_col string,
timestamp_col timestamp)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES (
"hbase.columns.mapping" =
":key,d:bool_col,d:tinyint_col,d:smallint_col,d:int_col,d:bigint_col,d:float_col,d:double_col,d:date_string_col,d:string_col,d:timestamp_col"
)
TBLPROPERTIES("hbase.table.name" = "functional_hbase.hbasealltypeserrornonulls");
====
---- DATASET
functional
---- BASE_TABLE_NAME
alltypes_deleted_rows
---- PARTITION_COLUMNS
year int
month int
---- COLUMNS
id int COMMENT 'Add a comment'
bool_col boolean
tinyint_col tinyint
smallint_col smallint
int_col int
bigint_col bigint
float_col float
double_col double
date_string_col string
string_col string
timestamp_col timestamp
---- DEPENDENT_LOAD_ACID
-- Copy alltypes, then delete a sparse subset of rows so the transactional
-- table contains ACID delete deltas that scans must apply.
INSERT INTO TABLE {db_name}{db_suffix}.{table_name} SELECT * FROM {db_name}{db_suffix}.alltypes;
DELETE FROM {db_name}{db_suffix}.{table_name} WHERE month % 2 = 0 and year % 2 = 0 and id % 10 = 0;
---- TABLE_PROPERTIES
transactional=true
====
---- DATASET
functional
---- BASE_TABLE_NAME
alltypesagg
---- PARTITION_COLUMNS
year int
month int
day int
---- COLUMNS
id int
bool_col boolean
tinyint_col tinyint
smallint_col smallint
int_col int
bigint_col bigint
float_col float
double_col double
date_string_col string
string_col string
timestamp_col timestamp
---- ROW_FORMAT
delimited fields terminated by ',' escaped by '\\'
---- HBASE_REGION_SPLITS
'1','3','5','7','9'
---- ALTER
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=1);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=2);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=3);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=4);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=5);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=6);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=7);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=8);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=9);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=10);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=NULL);
---- DEPENDENT_LOAD
-- Copy all partitions from the text-format table into the target file format.
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} partition (year, month, day)
SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col, double_col, date_string_col, string_col, timestamp_col, year, month, day
FROM {db_name}.{table_name};
---- LOAD
-- Dynamic partitioning is required by the INSERT at the end of this section.
SET hive.exec.dynamic.partition.mode=nonstrict;
SET hive.exec.dynamic.partition=true;
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypesAgg/100101.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=1, day=1);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypesAgg/100102.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=1, day=2);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypesAgg/100103.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=1, day=3);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypesAgg/100104.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=1, day=4);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypesAgg/100105.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=1, day=5);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypesAgg/100106.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=1, day=6);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypesAgg/100107.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=1, day=7);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypesAgg/100108.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=1, day=8);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypesAgg/100109.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=1, day=9);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypesAgg/100110.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=1, day=10);
-- Re-insert the rows whose tinyint_col is NULL into a day=NULL partition (day
-- is taken from tinyint_col, which is NULL for these rows). This duplicates
-- those ids across partitions.
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} partition (year, month, day) SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col, double_col, date_string_col, string_col, timestamp_col, year, month, tinyint_col as day FROM {db_name}.{table_name} WHERE year=2010 and month=1 and day IS NOT NULL and tinyint_col IS NULL order by id;
---- CREATE_KUDU
-- Kudu requires a unique, non-null primary key, but alltypesagg ids are
-- duplicated (see the day=NULL re-insert in the LOAD section). The data is
-- therefore stored in an _idx table keyed by a synthetic kudu_idx column and
-- exposed through a view that matches the original schema.
DROP VIEW IF EXISTS {db_name}{db_suffix}.{table_name};
DROP TABLE IF EXISTS {db_name}{db_suffix}.{table_name}_idx;
CREATE TABLE {db_name}{db_suffix}.{table_name}_idx (
kudu_idx BIGINT PRIMARY KEY,
id INT NULL,
bool_col BOOLEAN NULL,
tinyint_col TINYINT NULL,
smallint_col SMALLINT NULL,
int_col INT NULL,
bigint_col BIGINT NULL,
float_col FLOAT NULL,
double_col DOUBLE NULL,
date_string_col STRING NULL,
string_col STRING NULL,
timestamp_col TIMESTAMP NULL,
year INT NULL,
month INT NULL,
day INT NULL
)
PARTITION BY HASH (kudu_idx) PARTITIONS 3 STORED AS KUDU;
CREATE VIEW {db_name}{db_suffix}.{table_name} AS
SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col,
double_col, date_string_col, string_col, timestamp_col, year, month, day
FROM {db_name}{db_suffix}.{table_name}_idx;
---- DEPENDENT_LOAD_KUDU
-- row_number() over a deterministic ordering generates the synthetic key.
INSERT into TABLE {db_name}{db_suffix}.{table_name}_idx
SELECT row_number() over (order by year, month, id, day),
id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col,
double_col, date_string_col, string_col,
timestamp_col, year, month, day
FROM {db_name}.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
alltypesaggnonulls
---- PARTITION_COLUMNS
year int
month int
day int
---- COLUMNS
id int
bool_col boolean
tinyint_col tinyint
smallint_col smallint
int_col int
bigint_col bigint
float_col float
double_col double
date_string_col string
string_col string
timestamp_col timestamp
---- ROW_FORMAT
delimited fields terminated by ',' escaped by '\\'
---- ALTER
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=1);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=2);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=3);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=4);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=5);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=6);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=7);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=8);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=9);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=10);
---- DEPENDENT_LOAD
-- Copy all partitions from the text-format table into the target file format.
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} partition (year, month, day)
SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col, double_col, date_string_col, string_col, timestamp_col, year, month, day
FROM {db_name}.{table_name};
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypesAggNoNulls/100101.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=1, day=1);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypesAggNoNulls/100102.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=1, day=2);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypesAggNoNulls/100103.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=1, day=3);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypesAggNoNulls/100104.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=1, day=4);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypesAggNoNulls/100105.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=1, day=5);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypesAggNoNulls/100106.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=1, day=6);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypesAggNoNulls/100107.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=1, day=7);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypesAggNoNulls/100108.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=1, day=8);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypesAggNoNulls/100109.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=1, day=9);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/target/AllTypesAggNoNulls/100110.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=1, day=10);
---- CREATE_KUDU
-- Unlike alltypesagg, there is no day=NULL duplicate re-insert here, so id can
-- serve directly as the Kudu primary key — no synthetic key or view is needed.
DROP TABLE IF EXISTS {db_name}{db_suffix}.{table_name};
CREATE TABLE {db_name}{db_suffix}.{table_name} (
id INT PRIMARY KEY,
bool_col BOOLEAN,
tinyint_col TINYINT,
smallint_col SMALLINT,
int_col INT,
bigint_col BIGINT,
float_col FLOAT,
double_col DOUBLE,
date_string_col STRING,
string_col STRING,
timestamp_col TIMESTAMP,
year INT,
month INT,
day INT
)
PARTITION BY HASH (id) PARTITIONS 3 STORED AS KUDU;
---- DEPENDENT_LOAD_KUDU
INSERT into TABLE {db_name}{db_suffix}.{table_name}
SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col,
double_col, date_string_col, string_col,
timestamp_col, year, month, day
FROM {db_name}.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
allcomplextypes
---- PARTITION_COLUMNS
year int
month int
---- COLUMNS
id int
int_array_col array<int>
array_array_col array<array<int>>
map_array_col array<map<string,int>>
struct_array_col array<struct<f1: bigint, f2: string>>
int_map_col map<string, int>
array_map_col map<string, array<int>>
map_map_col map<string, map<string, int>>
struct_map_col map<string, struct<f1: bigint, f2: string>>
int_struct_col struct<f1: int, f2: int>
complex_struct_col struct<f1: int, f2: array<int>, f3: map<string, int>>
nested_struct_col struct<f1: int, f2: struct<f11: bigint, f12: struct<f21: bigint>>>
complex_nested_struct_col struct<f1: int, f2: array<struct<f11: bigint, f12: map<string, struct<f21: bigint>>>>>
---- ROW_FORMAT
delimited fields terminated by ',' escaped by '\\'
---- DEPENDENT_LOAD
---- LOAD
====
---- DATASET
functional
---- BASE_TABLE_NAME
complextypestbl
---- COLUMNS
id bigint
int_array array<int>
int_array_array array<array<int>>
int_map map<string, int>
int_map_array array<map<string, int>>
nested_struct struct<a: int, b: array<int>, c: struct<d: array<array<struct<e: int, f: string>>>>, g: map<string, struct<h: struct<i: array<double>>>>>
---- DEPENDENT_LOAD
`hadoop fs -mkdir -p /test-warehouse/complextypestbl_parquet && \
hadoop fs -put -f ${IMPALA_HOME}/testdata/ComplexTypesTbl/nullable.parq \
/test-warehouse/complextypestbl_parquet/ && \
hadoop fs -put -f ${IMPALA_HOME}/testdata/ComplexTypesTbl/nonnullable.parq \
/test-warehouse/complextypestbl_parquet/
---- DEPENDENT_LOAD_ACID
-- For ACID table formats, copy from the already-loaded Parquet table rather
-- than re-loading the raw data files.
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} SELECT * FROM functional_parquet.complextypestbl;
---- LOAD
====
---- DATASET
functional
---- BASE_TABLE_NAME
alltypes_structs
---- PARTITION_COLUMNS
year int
month int
---- COLUMNS
id int
struct_val struct<bool_col:boolean, tinyint_col:tinyint, smallint_col:smallint, int_col:int, bigint_col:bigint, float_col:float, double_col:double, date_string_col:string, string_col:string, timestamp_col:timestamp>
---- DEPENDENT_LOAD_HIVE
-- Runs in Hive (Impala cannot insert complex types): packs the flat alltypes
-- columns into a single struct column via named_struct().
INSERT INTO {db_name}{db_suffix}.{table_name}
PARTITION (year, month)
SELECT
id,
named_struct(
'bool_col', bool_col,
'tinyint_col', tinyint_col,
'smallint_col', smallint_col,
'int_col', int_col,
'bigint_col', bigint_col,
'float_col', float_col,
'double_col', double_col,
'date_string_col', date_string_col,
'string_col', string_col,
'timestamp_col', timestamp_col),
year,
month
FROM {db_name}{db_suffix}.alltypes;
---- LOAD
====
---- DATASET
functional
---- BASE_TABLE_NAME
complextypes_structs
---- COLUMNS
id int
str string
alltypes struct<ti:tinyint, si:smallint, i:int, bi:bigint, b:boolean, f:float, do:double, da:date, ts:timestamp, s1:string, s2:string, c1:char(1), c2:char(3), vc:varchar(10), de1:decimal(5, 0), de2:decimal(10, 3)>
tiny_struct struct<b:boolean>
small_struct struct<i:int, s:string>
---- DEPENDENT_LOAD
`hadoop fs -mkdir -p /test-warehouse/complextypes_structs_parquet && \
hadoop fs -put -f ${IMPALA_HOME}/testdata/ComplexTypesTbl/structs.parq \
/test-warehouse/complextypes_structs_parquet/
---- DEPENDENT_LOAD_ACID
-- ACID (ORC) variant loads a pre-built ORC data file.
LOAD DATA LOCAL INPATH '{impala_home}/testdata/ComplexTypesTbl/structs.orc' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
---- LOAD
====
---- DATASET
functional
---- BASE_TABLE_NAME
complextypes_nested_structs
---- COLUMNS
id int
outer_struct struct<str:string,inner_struct1:struct<str:string,de:decimal(8,2)>,inner_struct2:struct<i:int,str:string>,inner_struct3:struct<s:struct<i:int,s:string>>>
---- DEPENDENT_LOAD
`hadoop fs -mkdir -p /test-warehouse/complextypes_nested_structs_parquet && \
hadoop fs -put -f ${IMPALA_HOME}/testdata/ComplexTypesTbl/structs_nested.parq \
/test-warehouse/complextypes_nested_structs_parquet/
---- DEPENDENT_LOAD_ACID
-- ACID (ORC) variant loads a pre-built ORC data file.
LOAD DATA LOCAL INPATH '{impala_home}/testdata/ComplexTypesTbl/structs_nested.orc' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
---- LOAD
====
---- DATASET
functional
---- BASE_TABLE_NAME
complextypestbl_minor_compacted
---- COLUMNS
id bigint
int_array array<int>
int_array_array array<array<int>>
int_map map<string, int>
int_map_array array<map<string, int>>
nested_struct struct<a: int, b: array<int>, c: struct<d: array<array<struct<e: int, f: string>>>>, g: map<string, struct<h: struct<i: array<double>>>>>
---- DEPENDENT_LOAD_ACID
-- Insert the rows one statement at a time so the transactional table
-- accumulates multiple deltas, then run a minor compaction to merge them.
INSERT INTO TABLE {db_name}{db_suffix}.{table_name} SELECT * FROM {db_name}{db_suffix}.complextypestbl where id = 1;
INSERT INTO TABLE {db_name}{db_suffix}.{table_name} SELECT * FROM {db_name}{db_suffix}.complextypestbl where id = 2;
INSERT INTO TABLE {db_name}{db_suffix}.{table_name} SELECT * FROM {db_name}{db_suffix}.complextypestbl where id = 3;
INSERT INTO TABLE {db_name}{db_suffix}.{table_name} SELECT * FROM {db_name}{db_suffix}.complextypestbl where id = 4;
INSERT INTO TABLE {db_name}{db_suffix}.{table_name} SELECT * FROM {db_name}{db_suffix}.complextypestbl where id = 5;
INSERT INTO TABLE {db_name}{db_suffix}.{table_name} SELECT * FROM {db_name}{db_suffix}.complextypestbl where id = 6;
INSERT INTO TABLE {db_name}{db_suffix}.{table_name} SELECT * FROM {db_name}{db_suffix}.complextypestbl where id = 7;
INSERT INTO TABLE {db_name}{db_suffix}.{table_name} SELECT * FROM {db_name}{db_suffix}.complextypestbl where id = 8;
ALTER TABLE {db_name}{db_suffix}.{table_name} compact 'minor';
---- TABLE_PROPERTIES
transactional=true
====
---- DATASET
functional
---- BASE_TABLE_NAME
complextypestbl_deleted_rows
---- COLUMNS
id bigint
int_array array<int>
int_array_array array<array<int>>
int_map map<string, int>
int_map_array array<map<string, int>>
nested_struct struct<a: int, b: array<int>, c: struct<d: array<array<struct<e: int, f: string>>>>, g: map<string, struct<h: struct<i: array<double>>>>>
---- DEPENDENT_LOAD_ACID
-- Copy all rows, then delete the even ids so scans must apply delete deltas.
INSERT INTO TABLE {db_name}{db_suffix}.{table_name} SELECT * FROM {db_name}{db_suffix}.complextypestbl;
DELETE FROM {db_name}{db_suffix}.{table_name} WHERE id % 2 = 0;
====
---- DATASET
functional
---- BASE_TABLE_NAME
pos_item_key_value_complextypestbl
---- COLUMNS
pos bigint
item int
key string
value int
int_array array<int>
int_map map<string, int>
---- DEPENDENT_LOAD_HIVE
-- NOTE(review): the scalar columns are named pos/item/key/value, the same
-- names Impala uses for complex-type pseudo-columns — presumably to test
-- name-conflict resolution; confirm against the tests using this table.
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} SELECT id, id, CAST(id AS STRING), CAST(id AS STRING), int_array, int_map FROM {db_name}{db_suffix}.complextypestbl;
====
---- DATASET
functional
---- BASE_TABLE_NAME
complextypestbl_non_transactional
---- COLUMNS
id bigint
int_array array<int>
int_array_array array<array<int>>
int_map map<string, int>
int_map_array array<map<string, int>>
nested_struct struct<a: int, b: array<int>, c: struct<d: array<array<struct<e: int, f: string>>>>, g: map<string, struct<h: struct<i: array<double>>>>>
---- TABLE_PROPERTIES
transactional=false
---- DEPENDENT_LOAD
`hadoop fs -mkdir -p /test-warehouse/complextypestbl_non_transactional_orc_def && \
hadoop fs -put -f ${IMPALA_HOME}/testdata/ComplexTypesTbl/nullable.orc \
/test-warehouse/complextypestbl_non_transactional_orc_def/ && \
hadoop fs -put -f ${IMPALA_HOME}/testdata/ComplexTypesTbl/nonnullable.orc \
/test-warehouse/complextypestbl_non_transactional_orc_def/
---- LOAD
====
---- DATASET
functional
---- BASE_TABLE_NAME
complextypestbl_medium
---- COLUMNS
id bigint
int_array array<int>
int_array_array array<array<int>>
int_map map<string, int>
int_map_array array<map<string, int>>
nested_struct struct<a: int, b: array<int>, c: struct<d: array<array<struct<e: int, f: string>>>>, g: map<string, struct<h: struct<i: array<double>>>>>
---- DEPENDENT_LOAD_HIVE
-- This INSERT must run in Hive, because Impala doesn't support inserting into tables
-- with complex types.
-- The join has no join condition, so each complextypestbl row is multiplied by
-- the row count of alltypes, producing a larger ("medium") table.
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} SELECT c.* FROM functional_parquet.complextypestbl c join functional.alltypes sort by id;
====
---- DATASET
functional
---- BASE_TABLE_NAME
multipartformat
---- CREATE_HIVE
-- Used to test dynamic and static insert into partitioned tables which contains
-- supported and unsupported file formats.
CREATE TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (id int)
PARTITIONED BY (p string);
---- LOAD
-- Partitions are created first (with the table's default text format) and then
-- switched to the file format each one is named after.
ALTER TABLE {db_name}{db_suffix}.{table_name} ADD PARTITION (p='parquet');
ALTER TABLE {db_name}{db_suffix}.{table_name} ADD PARTITION (p='orc');
ALTER TABLE {db_name}{db_suffix}.{table_name} PARTITION (p='parquet')
SET FILEFORMAT PARQUET;
ALTER TABLE {db_name}{db_suffix}.{table_name} PARTITION (p='orc')
SET FILEFORMAT ORC;
====
---- DATASET
functional
---- BASE_TABLE_NAME
complextypes_fileformat
---- CREATE_HIVE
-- Used for positive/negative testing of complex types on various file formats.
-- In particular, queries on file formats for which we do not support complex types
-- should fail gracefully.
CREATE TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
id int,
s struct<f1:string,f2:int>,
a array<int>,
m map<string,bigint>)
STORED AS {file_format};
---- LOAD
-- Builds one struct/array/map value per alltypestiny row.
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} SELECT id, named_struct("f1",string_col,"f2",int_col), array(1, 2, 3), map("k", cast(0 as bigint)) FROM functional.alltypestiny;
---- DEPENDENT_LOAD_HIVE
-- This INSERT must run in Hive, because Impala doesn't support inserting into tables
-- with complex types.
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} SELECT * FROM functional.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
complextypes_multifileformat
---- CREATE_HIVE
-- Used for positive/negative testing of complex types on various file formats.
-- In particular, queries on file formats for which we do not support complex types
-- should fail gracefully. This table allows testing at a partition granularity.
CREATE TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
id int,
s struct<f1:string,f2:int>,
a array<int>,
m map<string,bigint>)
PARTITIONED BY (p int)
STORED AS {file_format};
---- LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} PARTITION(p=1) SELECT id, named_struct("f1",string_col,"f2",int_col), array(1, 2, 3), map("k", cast(0 as bigint)) FROM functional.alltypestiny;
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} PARTITION(p=2) SELECT id, named_struct("f1",string_col,"f2",int_col), array(1, 2, 3), map("k", cast(0 as bigint)) FROM functional.alltypestiny;
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} PARTITION(p=3) SELECT id, named_struct("f1",string_col,"f2",int_col), array(1, 2, 3), map("k", cast(0 as bigint)) FROM functional.alltypestiny;
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} PARTITION(p=4) SELECT id, named_struct("f1",string_col,"f2",int_col), array(1, 2, 3), map("k", cast(0 as bigint)) FROM functional.alltypestiny;
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} PARTITION(p=5) SELECT id, named_struct("f1",string_col,"f2",int_col), array(1, 2, 3), map("k", cast(0 as bigint)) FROM functional.alltypestiny;
-- The order of insertions and alterations is deliberately chosen to work around
-- a Hive bug where the format of an altered partition is reverted back to the
-- original format after an insert. So we first do the insert, and then alter
-- the format.
-- Partition p=1 intentionally keeps the table's original file format.
USE {db_name}{db_suffix};
ALTER TABLE {table_name} PARTITION (p=2) SET FILEFORMAT PARQUET;
ALTER TABLE {table_name} PARTITION (p=3) SET FILEFORMAT AVRO;
ALTER TABLE {table_name} PARTITION (p=4) SET FILEFORMAT RCFILE;
ALTER TABLE {table_name} PARTITION (p=5) SET FILEFORMAT ORC;
USE default;
====
---- DATASET
functional
---- BASE_TABLE_NAME
testtbl
---- COLUMNS
id bigint
name string
zip int
---- ROW_FORMAT
delimited fields terminated by ',' escaped by '\\'
---- CREATE_KUDU
-- Kudu version: range-partitioned on id into three tablets.
DROP TABLE IF EXISTS {db_name}{db_suffix}.{table_name};
create table {db_name}{db_suffix}.{table_name} (
id bigint primary key,
name string null,
zip int null
)
partition by range(id) (partition values <= 1003, partition 1003 < values <= 1007,
partition 1007 < values) stored as kudu;
====
---- DATASET
functional
---- BASE_TABLE_NAME
dimtbl
---- COLUMNS
id bigint
name string
zip int
---- ROW_FORMAT
delimited fields terminated by ',' escaped by '\\'
---- DEPENDENT_LOAD
-- Copy from the text-format source table into the target file format.
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} SELECT * FROM {db_name}.{table_name};
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/DimTbl/data.csv' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
---- CREATE_KUDU
-- Kudu version: range-partitioned on id into three tablets.
DROP TABLE IF EXISTS {db_name}{db_suffix}.{table_name};
create table {db_name}{db_suffix}.{table_name} (
id bigint primary key,
name string,
zip int
)
partition by range(id) (partition values <= 1003, partition 1003 < values <= 1007,
partition 1007 < values) stored as kudu;
====
---- DATASET
functional
---- BASE_TABLE_NAME
jointbl
---- COLUMNS
test_id bigint
test_name string
test_zip int
alltypes_id int
---- ROW_FORMAT
delimited fields terminated by ',' escaped by '\\'
---- DEPENDENT_LOAD
-- Copy from the text-format source table into the target file format.
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} SELECT * FROM {db_name}.{table_name};
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/JoinTbl/data.csv' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
---- CREATE_KUDU
-- Kudu requires a primary key; all four columns form a composite key here,
-- range-partitioned on the (test_id, test_name) prefix.
DROP TABLE IF EXISTS {db_name}{db_suffix}.{table_name};
create table {db_name}{db_suffix}.{table_name} (
test_id bigint,
test_name string,
test_zip int,
alltypes_id int,
primary key (test_id, test_name, test_zip, alltypes_id)
)
partition by range(test_id, test_name)
(partition values <= (1003, 'Name3'),
partition (1003, 'Name3') < values <= (1007, 'Name7'),
partition (1007, 'Name7') < values)
stored as kudu;
====
---- DATASET
functional
---- BASE_TABLE_NAME
liketbl
---- COLUMNS
str_col string
match_like_col string
no_match_like_col string
match_regex_col string
no_match_regex_col string
---- ROW_FORMAT
delimited fields terminated by ',' escaped by '\\'
---- DEPENDENT_LOAD
-- Copy from the text-format source table into the target file format.
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} SELECT * FROM {db_name}.{table_name};
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/LikeTbl/data.csv' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
alltypessmallbinary
---- CREATE_HIVE
-- This table does not define a ':key' column spec. If one is not specified, the
-- first column is implied.
-- Default storage is binary (see "hbase.table.default.storage.type" below);
-- the '#-' suffix keeps that default per column, while timestamp_col
-- overrides it with '#s' (string).
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
id int,
bool_col boolean,
tinyint_col tinyint,
smallint_col smallint,
int_col int,
bigint_col bigint,
float_col float,
double_col double,
date_string_col string,
string_col string,
timestamp_col timestamp,
year int,
month int)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES (
"hbase.columns.mapping" =
"d:bool_col#-,
d:tinyint_col#-,
d:smallint_col#-,
d:int_col#-,
d:bigint_col#-,
d:float_col#-,
d:double_col#-,
d:date_string_col#-,
d:string_col#-,
d:timestamp_col#s,
d:year#-,
d:month#-"
)
TBLPROPERTIES ("hbase.table.name" = "functional_hbase.alltypessmallbinary",
"hbase.table.default.storage.type" = "binary");
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name}
SELECT * FROM functional.alltypessmall;
====
---- DATASET
functional
---- BASE_TABLE_NAME
insertalltypesaggbinary
---- CREATE_HIVE
-- HBase table whose row key and numeric columns are stored in binary ('#b');
-- the string/timestamp columns (no suffix) keep the default string encoding.
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
id int,
bool_col boolean,
tinyint_col tinyint,
smallint_col smallint,
int_col int,
bigint_col bigint,
float_col float,
double_col double,
date_string_col string,
string_col string,
timestamp_col timestamp,
year int,
month int,
day int)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES (
"hbase.columns.mapping" =
":key#b,d:bool_col#b,d:tinyint_col#b,d:smallint_col#b,d:int_col#b,d:bigint_col#b,d:float_col#b,d:double_col#b,d:date_string_col,d:string_col,d:timestamp_col,d:year#b,d:month#b,d:day#b"
)
TBLPROPERTIES("hbase.table.name" = "functional_hbase.insertalltypesaggbinary");
====
---- DATASET
functional
---- BASE_TABLE_NAME
insertalltypesagg
---- PARTITION_COLUMNS
year int
month int
day int
---- COLUMNS
id int
bool_col boolean
tinyint_col tinyint
smallint_col smallint
int_col int
bigint_col bigint
float_col float
double_col double
date_string_col string
string_col string
timestamp_col timestamp
====
---- DATASET
functional
---- BASE_TABLE_NAME
stringids
---- PARTITION_COLUMNS
year int
month int
day int
---- COLUMNS
id string
bool_col boolean
tinyint_col tinyint
smallint_col smallint
int_col int
bigint_col bigint
float_col float
double_col double
date_string_col string
string_col string
timestamp_col timestamp
====
---- DATASET
-- Simple pass-through view over alltypes.
functional
---- BASE_TABLE_NAME
alltypes_view
---- CREATE
CREATE VIEW IF NOT EXISTS {db_name}{db_suffix}.{table_name}
AS SELECT * FROM {db_name}{db_suffix}.alltypes;
---- LOAD
====
---- DATASET
functional
---- BASE_TABLE_NAME
alltypes_hive_view
---- CREATE_HIVE
-- Test that Impala can handle incorrect column metadata created by Hive (IMPALA-994).
DROP VIEW IF EXISTS {db_name}{db_suffix}.{table_name};
-- Beeline cannot handle the stmt below when broken up into multiple lines.
CREATE VIEW {db_name}{db_suffix}.{table_name} AS SELECT * FROM {db_name}{db_suffix}.alltypes;
====
---- DATASET
-- View exposing three alltypes columns under renamed output columns (x, y, z).
functional
---- BASE_TABLE_NAME
alltypes_view_sub
---- CREATE
DROP VIEW IF EXISTS {db_name}{db_suffix}.{table_name};
CREATE VIEW {db_name}{db_suffix}.{table_name} (x, y, z)
AS SELECT int_col, string_col, timestamp_col FROM {db_name}{db_suffix}.alltypes;
---- LOAD
====
---- DATASET
-- View combining a join, aggregation, HAVING, ORDER BY and LIMIT, with
-- explicitly named and commented output columns.
functional
---- BASE_TABLE_NAME
complex_view
---- CREATE
CREATE VIEW IF NOT EXISTS {db_name}{db_suffix}.complex_view
(abc COMMENT 'agg', xyz COMMENT 'gby') AS
SELECT COUNT(a.bigint_col), b.string_col FROM
{db_name}{db_suffix}.alltypesagg a INNER JOIN {db_name}{db_suffix}.alltypestiny b
ON a.id = b.id WHERE a.bigint_col < 50
GROUP BY b.string_col HAVING COUNT(a.bigint_col) > 1
ORDER BY b.string_col LIMIT 100;
---- LOAD
====
---- DATASET
-- View layered on top of another view (alltypes_view).
functional
---- BASE_TABLE_NAME
view_view
---- CREATE
CREATE VIEW IF NOT EXISTS {db_name}{db_suffix}.{table_name}
AS SELECT * FROM {db_name}{db_suffix}.alltypes_view;
---- LOAD
====
---- DATASET
-- View whose definition contains an IN subquery.
functional
---- BASE_TABLE_NAME
subquery_view
---- CREATE
CREATE VIEW IF NOT EXISTS {db_name}{db_suffix}.{table_name}
AS SELECT COUNT(*) FROM {db_name}{db_suffix}.alltypes
WHERE id IN (SELECT id FROM {db_name}{db_suffix}.alltypessmall where int_col < 5);
---- LOAD
====
---- DATASET
-- View with a parenthesized OR predicate between two AND conjuncts.
functional
---- BASE_TABLE_NAME
alltypes_parens
---- CREATE
CREATE VIEW IF NOT EXISTS {db_name}{db_suffix}.{table_name}
AS SELECT * FROM {db_name}{db_suffix}.alltypes
WHERE year = 2009 and (int_col < 100 OR bool_col = false) and month = 1;
---- LOAD
====
---- DATASET
-- Text table with ',' field delimiter, '\' escape and '\n' line terminator.
functional
---- BASE_TABLE_NAME
text_comma_backslash_newline
---- COLUMNS
col1 string
col2 string
col3 int
col4 int
---- ROW_FORMAT
delimited fields terminated by ',' escaped by '\\' lines terminated by '\n'
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/text-comma-backslash-newline.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
-- Text table with '$' field delimiter, '#' escape and '|' line terminator.
functional
---- BASE_TABLE_NAME
text_dollar_hash_pipe
---- COLUMNS
col1 string
col2 string
col3 int
col4 int
---- ROW_FORMAT
delimited fields terminated by '$' escaped by '#' lines terminated by '|'
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/text-dollar-hash-pipe.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
-- Text table whose field delimiter and escape are extended-ASCII characters.
functional
---- BASE_TABLE_NAME
text_thorn_ecirc_newline
---- COLUMNS
col1 string
col2 string
col3 int
col4 int
---- ROW_FORMAT
-- -2 => ASCII 254 (thorn character) and -22 => ASCII 234 (lowercase e with a circumflex)
delimited fields terminated by '-2' escaped by '-22' lines terminated by '\n'
---- LOAD
-- Hive has a bug where it will not load a table's table metadata if ESCAPED BY and
-- TERMINATED BY are specified at the same time and set to extended ASCII characters.
-- To work around this, the data file is loaded into a temp table with the same location.
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name}_tmp(i int) LOCATION '/test-warehouse/{table_name}';
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/text-thorn-ecirc-newline.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name}_tmp;
DROP TABLE {db_name}{db_suffix}.{table_name}_tmp;
====
---- DATASET
-- Numeric and decimal columns of assorted widths loaded from overflow.txt.
functional
---- BASE_TABLE_NAME
overflow
---- COLUMNS
tinyint_col tinyint
smallint_col smallint
int_col int
bigint_col bigint
float_col float
double_col double
decimal0_col DECIMAL(13,4)
decimal1_col DECIMAL(38,0)
decimal2_col DECIMAL(38,38)
---- ROW_FORMAT
delimited fields terminated by ',' escaped by '\\'
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/overflow.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
-- Single-string-column table loaded from a wide-row data file.
functional
---- BASE_TABLE_NAME
widerow
---- COLUMNS
string_col string
---- ROW_FORMAT
delimited fields terminated by ',' escaped by '\\'
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} SELECT * FROM {db_name}.{table_name};
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/widerow.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
-- Single-column table loaded from the hive_benchmark grep dataset.
functional
---- BASE_TABLE_NAME
greptiny
---- COLUMNS
field string
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} SELECT * FROM {db_name}.{table_name};
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/hive_benchmark/grepTiny/part-00000' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
-- Pipe-delimited Rankings data from the hive_benchmark dataset.
functional
---- BASE_TABLE_NAME
rankingssmall
---- COLUMNS
pageRank int
pageURL string
avgDuration int
---- ROW_FORMAT
delimited fields terminated by '|'
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} SELECT * FROM {db_name}.{table_name};
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/hive_benchmark/htmlTiny/Rankings.dat' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
-- Pipe-delimited UserVisits data from the hive_benchmark dataset.
functional
---- BASE_TABLE_NAME
uservisitssmall
---- COLUMNS
sourceIP string
destURL string
visitDate string
adRevenue float
userAgent string
cCode string
lCode string
sKeyword string
avgTimeOnSite int
---- ROW_FORMAT
delimited fields terminated by '|'
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} SELECT * FROM {db_name}.{table_name};
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/hive_benchmark/htmlTiny/UserVisits.dat' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
-- Partitioned table with no load section; the Kudu variant is hash-partitioned
-- on 'field' and carries f2 as a regular (non-partition) column.
functional
---- BASE_TABLE_NAME
emptytable
---- PARTITION_COLUMNS
f2 int
---- COLUMNS
field string
---- CREATE_KUDU
DROP TABLE IF EXISTS {db_name}{db_suffix}.{table_name};
CREATE TABLE {db_name}{db_suffix}.{table_name} (
field STRING PRIMARY KEY,
f2 INT
)
PARTITION BY HASH (field) PARTITIONS 3 STORED AS KUDU;
====
---- DATASET
-- alltypesagg data written as multiple files per partition: four separate
-- insert statements, one per residue class of id % 4.
functional
---- BASE_TABLE_NAME
alltypesaggmultifiles
---- PARTITION_COLUMNS
year int
month int
day int
---- COLUMNS
id int
bool_col boolean
tinyint_col tinyint
smallint_col smallint
int_col int
bigint_col bigint
float_col float
double_col double
date_string_col string
string_col string
timestamp_col timestamp
---- ALTER
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=1);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=2);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=3);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=4);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=5);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=6);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=7);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=8);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=9);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=10);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(year=2010, month=1, day=NULL);
---- DEPENDENT_LOAD
insert overwrite table {db_name}{db_suffix}.{table_name} partition (year, month, day) SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col, double_col, date_string_col, string_col, timestamp_col, year, month, day FROM {db_name}.{table_name} where id % 4 = 0;
insert into table {db_name}{db_suffix}.{table_name} partition (year, month, day) SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col, double_col, date_string_col, string_col, timestamp_col, year, month, day FROM {db_name}.{table_name} where id % 4 = 1;
insert into table {db_name}{db_suffix}.{table_name} partition (year, month, day) SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col, double_col, date_string_col, string_col, timestamp_col, year, month, day FROM {db_name}.{table_name} where id % 4 = 2;
insert into table {db_name}{db_suffix}.{table_name} partition (year, month, day) SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col, double_col, date_string_col, string_col, timestamp_col, year, month, day FROM {db_name}.{table_name} where id % 4 = 3;
---- LOAD
SET hive.exec.dynamic.partition.mode=nonstrict;
SET hive.exec.dynamic.partition=true;
insert overwrite table {db_name}{db_suffix}.{table_name} partition (year, month, day) SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col, double_col, date_string_col, string_col, timestamp_col, year, month, day FROM functional.alltypesagg where id % 4 = 0;
insert into table {db_name}{db_suffix}.{table_name} partition (year, month, day) SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col, double_col, date_string_col, string_col, timestamp_col, year, month, day FROM functional.alltypesagg where id % 4 = 1;
insert into table {db_name}{db_suffix}.{table_name} partition (year, month, day) SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col, double_col, date_string_col, string_col, timestamp_col, year, month, day FROM functional.alltypesagg where id % 4 = 2;
insert into table {db_name}{db_suffix}.{table_name} partition (year, month, day) SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col, double_col, date_string_col, string_col, timestamp_col, year, month, day FROM functional.alltypesagg where id % 4 = 3;
====
---- DATASET
-- Unpartitioned variant of alltypesaggmultifiles; data is again written by
-- four separate inserts (one per id % 4 residue).
functional
---- BASE_TABLE_NAME
alltypesaggmultifilesnopart
---- COLUMNS
id int
bool_col boolean
tinyint_col tinyint
smallint_col smallint
int_col int
bigint_col bigint
float_col float
double_col double
date_string_col string
string_col string
timestamp_col timestamp
---- DEPENDENT_LOAD
insert into table {db_name}{db_suffix}.{table_name} SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col, double_col, date_string_col, string_col, timestamp_col FROM {db_name}.{table_name} where id % 4 = 0;
insert into table {db_name}{db_suffix}.{table_name} SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col, double_col, date_string_col, string_col, timestamp_col FROM {db_name}.{table_name} where id % 4 = 1;
insert into table {db_name}{db_suffix}.{table_name} SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col, double_col, date_string_col, string_col, timestamp_col FROM {db_name}.{table_name} where id % 4 = 2;
insert into table {db_name}{db_suffix}.{table_name} SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col, double_col, date_string_col, string_col, timestamp_col FROM {db_name}.{table_name} where id % 4 = 3;
---- LOAD
SET hive.exec.dynamic.partition.mode=nonstrict;
SET hive.exec.dynamic.partition=true;
SET hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
insert overwrite table {db_name}{db_suffix}.{table_name} SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col, double_col, date_string_col, string_col, timestamp_col FROM functional.alltypesagg where id % 4 = 0;
insert into table {db_name}{db_suffix}.{table_name} SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col, double_col, date_string_col, string_col, timestamp_col FROM functional.alltypesagg where id % 4 = 1;
insert into table {db_name}{db_suffix}.{table_name} SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col, double_col, date_string_col, string_col, timestamp_col FROM functional.alltypesagg where id % 4 = 2;
insert into table {db_name}{db_suffix}.{table_name} SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col, double_col, date_string_col, string_col, timestamp_col FROM functional.alltypesagg where id % 4 = 3;
====
---- DATASET
-- Table partitioned on a STRING column; one partition value is formatted like
-- a timestamp ("2009-01-01 00:00:00").
functional
---- BASE_TABLE_NAME
stringpartitionkey
---- PARTITION_COLUMNS
string_col string
---- COLUMNS
id int
---- ALTER
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION (string_col = "partition1");
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION (string_col = "2009-01-01 00:00:00");
---- LOAD
SET hive.exec.dynamic.partition.mode=nonstrict;
SET hive.exec.dynamic.partition=true;
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} PARTITION(string_col)
SELECT id, timestamp_col as string_col from functional.alltypestiny
WHERE timestamp_col = "2009-01-01 00:00:00";
====
---- DATASET
-- Two-string-column CSV table; the Kudu variant is range-partitioned on 'a'.
functional
---- BASE_TABLE_NAME
tinytable
---- COLUMNS
a string
b string
---- ROW_FORMAT
delimited fields terminated by ','
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} SELECT * FROM {db_name}.{table_name};
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/TinyTable/data.csv' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
---- CREATE_KUDU
DROP TABLE IF EXISTS {db_name}{db_suffix}.{table_name};
create table {db_name}{db_suffix}.{table_name} (
a string primary key,
b string
)
partition by range(a) (partition values <= 'b', partition 'b' < values <= 'd',
partition 'd' < values) stored as kudu;
====
---- DATASET
-- Single-INT-column CSV table; the Kudu variant is range-partitioned on int_col.
functional
---- BASE_TABLE_NAME
tinyinttable
---- COLUMNS
int_col int
---- ROW_FORMAT
delimited fields terminated by ','
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} SELECT * FROM {db_name}.{table_name};
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/TinyIntTable/data.csv' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
---- CREATE_KUDU
DROP TABLE IF EXISTS {db_name}{db_suffix}.{table_name};
create table {db_name}{db_suffix}.{table_name} (
int_col int primary key
)
partition by range(int_col) (partition values <= 2, partition 2 < values <= 4,
partition 4 < values <= 6, partition 6 < values <= 8, partition 8 < values)
stored as kudu;
====
---- DATASET
-- Seven-column table loaded from NullTable/data.csv; in the Kudu variant only
-- 'a' (the primary key) is non-nullable.
functional
---- BASE_TABLE_NAME
nulltable
---- COLUMNS
a string
b string
c string
d int
e double
f string
g string
---- ROW_FORMAT
delimited fields terminated by ','
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} select * from functional.nulltable;
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/NullTable/data.csv'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
---- CREATE_KUDU
DROP TABLE IF EXISTS {db_name}{db_suffix}.{table_name};
create table {db_name}{db_suffix}.{table_name} (
a string primary key, b string null, c string null, d int null, e double null,
f string null, g string null
)
partition by hash(a) partitions 3 stored as kudu;
====
---- DATASET
-- Table with varying ratios of nulls. Used to test NDV with nulls
-- Also useful to test null counts as the count varies from 0 to
-- some to all rows.
functional
---- BASE_TABLE_NAME
nullrows
---- COLUMNS
id string
blank string
null_str string
null_int int
null_double double
group_str string
some_nulls string
bool_nulls boolean
---- ROW_FORMAT
delimited fields terminated by ','
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} select * from functional.nullrows;
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/NullRows/data.csv'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
-- Same data as nulltable, but the row format defines a '\' escape character.
functional
---- BASE_TABLE_NAME
nullescapedtable
---- COLUMNS
a string
b string
c string
d int
e double
f string
g string
---- ROW_FORMAT
delimited fields terminated by ',' escaped by '\\'
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} select * from functional.nulltable;
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/NullTable/data.csv'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
---- CREATE_KUDU
DROP TABLE IF EXISTS {db_name}{db_suffix}.{table_name};
create table {db_name}{db_suffix}.{table_name} (
a string primary key, b string null, c string null, d int null, e double null,
f string null, g string null
)
partition by hash(a) partitions 3 stored as kudu;
====
---- DATASET
-- Table whose NULL marker is the custom string "xyz"
-- (via the serialization.null.format table property).
functional
---- BASE_TABLE_NAME
nullformat_custom
---- CREATE_HIVE
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
id int,
a boolean,
b string,
c int,
d double)
STORED AS {file_format}
TBLPROPERTIES("serialization.null.format" = "xyz");
====
---- DATASET
-- NOTE(review): the data file presumably contains rows with missing/extra
-- fields ("ragged" rows) -- confirm against TblWithRaggedColumns/data.csv.
functional
---- BASE_TABLE_NAME
TblWithRaggedColumns
---- COLUMNS
str_col string
int_col int
---- ROW_FORMAT
delimited fields terminated by ',' escaped by '\\'
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} SELECT * FROM {db_name}.{table_name};
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/TblWithRaggedColumns/data.csv' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
-- Zipcode/income demo data from ImpalaDemoDataset; the Kudu variant is
-- range-partitioned on id.
functional
---- BASE_TABLE_NAME
zipcode_incomes
---- COLUMNS
id STRING
zip STRING
description1 STRING
description2 STRING
income int
---- ROW_FORMAT
DELIMITED FIELDS TERMINATED BY ','
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} SELECT * FROM {db_name}.{table_name};
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/ImpalaDemoDataset/DEC_00_SF3_P077_with_ann_noheader.csv' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
---- CREATE_KUDU
DROP TABLE IF EXISTS {db_name}{db_suffix}.{table_name};
create table {db_name}{db_suffix}.{table_name} (
id string primary key,
zip string null,
description1 string null,
description2 string null,
income int null)
partition by range(id)
(partition values <= '8600000US01475',
partition '8600000US01475' < values <= '8600000US63121',
partition '8600000US63121' < values <= '8600000US84712',
partition '8600000US84712' < values
) stored as kudu;
====
---- DATASET
functional
---- BASE_TABLE_NAME
unsupported_types
---- CREATE_HIVE
-- Create a table that mixes supported and unsupported scalar types.
-- We should be able to read the column values of supported types and
-- fail queries that reference columns of unsupported types.
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
int_col INT,
dec_col DECIMAL,
date_col DATE,
str_col STRING,
bin_col BINARY,
bigint_col BIGINT)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
STORED AS {file_format}
LOCATION '{hdfs_location}';
---- TABLE_PROPERTIES
transactional=false
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} SELECT * FROM {db_name}.{table_name};
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/UnsupportedTypes/data.csv' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
unsupported_partition_types
---- CREATE_HIVE
-- Create a table that is partitioned on an unsupported partition-column type
CREATE TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
int_col INT)
PARTITIONED BY (t TIMESTAMP);
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} SELECT * FROM {db_name}.{table_name};
====
---- DATASET
-- Loads a pre-generated RCFile (oldrcfile.rc) via Hive.
functional
---- BASE_TABLE_NAME
old_rcfile_table
---- COLUMNS
key INT
value STRING
---- DEPENDENT_LOAD_HIVE
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/oldrcfile.rc'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
-- Loads a gzip file that was not finished correctly (file_not_finished.gz).
functional
---- BASE_TABLE_NAME
bad_text_gzip
---- COLUMNS
s STRING
i INT
---- DEPENDENT_LOAD_HIVE
LOAD DATA LOCAL INPATH '{impala_home}/testdata/bad_text_gzip/file_not_finished.gz'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
bad_seq_snap
---- COLUMNS
field STRING
---- DEPENDENT_LOAD_HIVE
-- This data file contains format errors and is accessed by the unit test: sequence-file-recover-test.
LOAD DATA LOCAL INPATH '{impala_home}/testdata/bad_seq_snap/bad_file'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
-- Avro files with corrupt string data: a negative string length, an invalid
-- union, and a truncated string (see the three file names below).
functional
---- BASE_TABLE_NAME
bad_avro_snap_strings
---- COLUMNS
s STRING
---- DEPENDENT_LOAD_HIVE
LOAD DATA LOCAL INPATH '{impala_home}/testdata/bad_avro_snap/negative_string_len.avro'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
LOAD DATA LOCAL INPATH '{impala_home}/testdata/bad_avro_snap/invalid_union.avro'
INTO TABLE {db_name}{db_suffix}.{table_name};
LOAD DATA LOCAL INPATH '{impala_home}/testdata/bad_avro_snap/truncated_string.avro'
INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
-- Avro file containing a truncated float.
functional
---- BASE_TABLE_NAME
bad_avro_snap_floats
---- COLUMNS
c1 FLOAT
---- DEPENDENT_LOAD_HIVE
LOAD DATA LOCAL INPATH '{impala_home}/testdata/bad_avro_snap/truncated_float.avro'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
-- Avro file with an invalid decimal schema.
functional
---- BASE_TABLE_NAME
bad_avro_decimal_schema
---- COLUMNS
name STRING
value DECIMAL(5,2)
---- DEPENDENT_LOAD_HIVE
LOAD DATA LOCAL INPATH '{impala_home}/testdata/bad_avro_snap/invalid_decimal_schema.avro'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
-- Avro file containing a date value outside the supported range.
functional
---- BASE_TABLE_NAME
bad_avro_date_out_of_range
---- COLUMNS
d DATE
---- DEPENDENT_LOAD_HIVE
LOAD DATA LOCAL INPATH '{impala_home}/testdata/bad_avro_snap/out_of_range_date.avro'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
-- Avro file with a pre-Gregorian date written by Hive 2.
functional
---- BASE_TABLE_NAME
hive2_bad_avro_date_pre_gregorian
---- COLUMNS
d DATE
---- DEPENDENT_LOAD_HIVE
LOAD DATA LOCAL INPATH '{impala_home}/testdata/bad_avro_snap/hive2_pre_gregorian_date.avro'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
-- Avro file with a pre-Gregorian date written by Hive 3.
functional
---- BASE_TABLE_NAME
hive3_avro_date_pre_gregorian
---- COLUMNS
d DATE
---- DEPENDENT_LOAD_HIVE
LOAD DATA LOCAL INPATH '{impala_home}/testdata/bad_avro_snap/hive3_pre_gregorian_date.avro'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
-- IMPALA-694: uses data file produced by parquet-mr version 1.2.5-cdh4.5.0
functional
---- BASE_TABLE_NAME
bad_parquet
---- COLUMNS
field STRING
---- DEPENDENT_LOAD_HIVE
-- IMPALA-694: data file produced by parquet-mr version 1.2.5-cdh4.5.0
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/bad_parquet_data.parquet'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
-- Data file produced by parquet-mr with repeated values (produces 0 bit width dictionary)
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/repeated_values.parquet'
INTO TABLE {db_name}{db_suffix}.{table_name};
-- IMPALA-720: data file produced by parquet-mr with multiple row groups
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/multiple_rowgroups.parquet'
INTO TABLE {db_name}{db_suffix}.{table_name};
-- IMPALA-1401: data file produced by Hive 13 containing page statistics with long min/max
-- string values
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/long_page_header.parquet'
INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
-- Parquet files whose string lengths are encoded as negative values
-- (dictionary- and plain-encoded variants).
functional
---- BASE_TABLE_NAME
bad_parquet_strings_negative_len
---- COLUMNS
s STRING
---- DEPENDENT_LOAD_HIVE
-- IMPALA-3732: parquet files with corrupt strings
LOAD DATA LOCAL INPATH
'{impala_home}/testdata/bad_parquet_data/dict-encoded-negative-len.parq'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
LOAD DATA LOCAL INPATH
'{impala_home}/testdata/bad_parquet_data/plain-encoded-negative-len.parq'
INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
-- Parquet files whose string data points out of bounds
-- (dictionary- and plain-encoded variants).
functional
---- BASE_TABLE_NAME
bad_parquet_strings_out_of_bounds
---- COLUMNS
s STRING
---- DEPENDENT_LOAD_HIVE
-- IMPALA-3732: parquet files with corrupt strings
LOAD DATA LOCAL INPATH
'{impala_home}/testdata/bad_parquet_data/dict-encoded-out-of-bounds.parq'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
LOAD DATA LOCAL INPATH
'{impala_home}/testdata/bad_parquet_data/plain-encoded-out-of-bounds.parq'
INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
bad_parquet_decimals
---- COLUMNS
d1 DECIMAL(4, 2)
d2 DECIMAL(4, 2)
d3 DECIMAL(4, 2)
d4 DECIMAL(4, 2)
d5 DECIMAL(4, 2)
d6 DECIMAL(4, 2)
d7 DECIMAL(4, 2)
d8 DECIMAL(4, 2)
---- DEPENDENT_LOAD_HIVE
-- IMPALA-10808: parquet files with illegal decimal schemas
LOAD DATA LOCAL INPATH
'{impala_home}/testdata/bad_parquet_data/illegal_decimals.parq'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
-- IMPALA-2130: Wrong verification of parquet file version
functional
---- BASE_TABLE_NAME
bad_magic_number
---- COLUMNS
field STRING
---- DEPENDENT_LOAD_HIVE
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/bad_magic_number.parquet'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
-- IMPALA-1658: Timestamps written by Hive are local-to-UTC adjusted.
-- Data file: alltypesagg_hive_13_1.parquet, written by Hive 13.1.
functional
---- BASE_TABLE_NAME
alltypesagg_hive_13_1
---- COLUMNS
id int
bool_col boolean
tinyint_col tinyint
smallint_col smallint
int_col int
bigint_col bigint
float_col float
double_col double
date_string_col string
string_col string
timestamp_col timestamp
year int
month int
day int
---- DEPENDENT_LOAD_HIVE
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/alltypesagg_hive_13_1.parquet'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
-- Parquet file with invalid metadata size in the file footer.
functional
---- BASE_TABLE_NAME
bad_metadata_len
---- COLUMNS
field TINYINT
---- DEPENDENT_LOAD_HIVE
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/bad_metadata_len.parquet'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
-- Parquet file with invalid column dict_page_offset.
functional
---- BASE_TABLE_NAME
bad_dict_page_offset
---- COLUMNS
field TINYINT
---- DEPENDENT_LOAD_HIVE
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/bad_dict_page_offset.parquet'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
-- Parquet file with invalid column total_compressed_size.
functional
---- BASE_TABLE_NAME
bad_compressed_size
---- COLUMNS
field TINYINT
---- DEPENDENT_LOAD_HIVE
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/bad_compressed_size.parquet'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
-- Parquet file with required columns written by Kite. Hive and Impala always write files
-- with fields as optional.
functional
---- BASE_TABLE_NAME
kite_required_fields
---- COLUMNS
req_int bigint
opt_int bigint
req_string string
opt_string string
req_bool boolean
opt_bool boolean
opt_int_2 bigint
opt_int_3 bigint
req_int_2 bigint
req_int_3 bigint
---- DEPENDENT_LOAD_HIVE
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/kite_required_fields.parquet'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
-- Parquet file with incorrect column metadata in multiple row groups
functional
---- BASE_TABLE_NAME
bad_column_metadata
---- COLUMNS
id bigint
int_array array<int>
---- DEPENDENT_LOAD_HIVE
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/bad_column_metadata.parquet'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
bad_serde
---- CREATE_HIVE
-- For incompatible SerDe testing
CREATE TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (col int)
ROW FORMAT serde "org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe";
====
---- DATASET
-- RCFile storage paired with LazyBinaryColumnarSerDe.
functional
---- BASE_TABLE_NAME
rcfile_lazy_binary_serde
---- CREATE_HIVE
-- For incompatible SerDe testing
CREATE TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (int_col int)
ROW FORMAT SERDE
'org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe'
STORED AS INPUTFORMAT
'org.apache.hadoop.hive.ql.io.RCFileInputFormat'
OUTPUTFORMAT
'org.apache.hadoop.hive.ql.io.RCFileOutputFormat';
====
---- DATASET
-- Decimal columns covering default precision, max precision and max scale,
-- partitioned on a DECIMAL(9,0) column; also defined as a Kudu table whose
-- primary key spans every column.
functional
---- BASE_TABLE_NAME
decimal_tbl
---- COLUMNS
d1 DECIMAL
d2 DECIMAL(10, 0)
d3 DECIMAL(20, 10)
d4 DECIMAL(38, 38)
d5 DECIMAL(10, 5)
---- PARTITION_COLUMNS
d6 DECIMAL(9, 0)
---- ALTER
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(d6=1);
---- ROW_FORMAT
delimited fields terminated by ','
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/decimal_tbl.txt'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(d6=1);
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} partition(d6)
select * from functional.{table_name};
---- CREATE_KUDU
DROP TABLE IF EXISTS {db_name}{db_suffix}.{table_name};
CREATE TABLE {db_name}{db_suffix}.{table_name} (
d1 DECIMAL,
d2 DECIMAL(10, 0),
d3 DECIMAL(20, 10),
d4 DECIMAL(38, 38),
d5 DECIMAL(10, 5),
d6 DECIMAL(9, 0),
-- Every column participates in the primary key.
PRIMARY KEY (d1, d2, d3, d4, d5, d6)
)
PARTITION BY HASH PARTITIONS 3
STORED AS KUDU;
---- DEPENDENT_LOAD_KUDU
INSERT into TABLE {db_name}{db_suffix}.{table_name}
SELECT d1, d2, d3, d4, d5, d6
FROM {db_name}.{table_name};
====
---- DATASET
-- Reasonably large table with decimal values. This is used for
-- testing min-max filters with decimal types on kudu tables
functional
---- BASE_TABLE_NAME
decimal_rtf_tbl
---- COLUMNS
d5_0 DECIMAL(5, 0)
d5_1 DECIMAL(5, 1)
d5_3 DECIMAL(5, 3)
d5_5 DECIMAL(5, 5)
d9_0 DECIMAL(9, 0)
d9_1 DECIMAL(9, 1)
d9_5 DECIMAL(9, 5)
d9_9 DECIMAL(9, 9)
d14_0 DECIMAL(14, 0)
d14_1 DECIMAL(14, 1)
d14_7 DECIMAL(14, 7)
d14_14 DECIMAL(14, 14)
d18_0 DECIMAL(18, 0)
d18_1 DECIMAL(18, 1)
d18_9 DECIMAL(18, 9)
d18_18 DECIMAL(18, 18)
d28_0 DECIMAL(28, 0)
d28_1 DECIMAL(28, 1)
d28_14 DECIMAL(28, 14)
d28_28 DECIMAL(28, 28)
d38_0 DECIMAL(38, 0)
d38_1 DECIMAL(38, 1)
d38_19 DECIMAL(38, 19)
d38_38 DECIMAL(38, 38)
---- PARTITION_COLUMNS
dpc DECIMAL(9, 0)
---- ALTER
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(dpc=1);
---- ROW_FORMAT
delimited fields terminated by ','
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/decimal_rtf_tbl.txt'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(dpc=1);
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} partition(dpc)
select * from functional.{table_name};
---- CREATE_KUDU
DROP TABLE IF EXISTS {db_name}{db_suffix}.{table_name};
CREATE TABLE {db_name}{db_suffix}.{table_name} (
d5_0 DECIMAL(5, 0),
d5_1 DECIMAL(5, 1),
d5_3 DECIMAL(5, 3),
d5_5 DECIMAL(5, 5),
d9_0 DECIMAL(9, 0),
d9_1 DECIMAL(9, 1),
d9_5 DECIMAL(9, 5),
d9_9 DECIMAL(9, 9),
d14_0 DECIMAL(14, 0),
d14_1 DECIMAL(14, 1),
d14_7 DECIMAL(14, 7),
d14_14 DECIMAL(14, 14),
d18_0 DECIMAL(18, 0),
d18_1 DECIMAL(18, 1),
d18_9 DECIMAL(18, 9),
d18_18 DECIMAL(18, 18),
d28_0 DECIMAL(28, 0),
d28_1 DECIMAL(28, 1),
d28_14 DECIMAL(28, 14),
d28_28 DECIMAL(28, 28),
d38_0 DECIMAL(38, 0),
d38_1 DECIMAL(38, 1),
d38_19 DECIMAL(38, 19),
d38_38 DECIMAL(38, 38),
-- Every column participates in the primary key.
PRIMARY KEY (d5_0, d5_1, d5_3, d5_5, d9_0, d9_1, d9_5, d9_9, d14_0, d14_1, d14_7, d14_14, d18_0, d18_1, d18_9, d18_18, d28_0, d28_1, d28_14, d28_28, d38_0, d38_1, d38_19, d38_38)
)
PARTITION BY HASH PARTITIONS 10
STORED AS KUDU;
---- DEPENDENT_LOAD_KUDU
INSERT into TABLE {db_name}{db_suffix}.{table_name}
SELECT d5_0, d5_1, d5_3, d5_5, d9_0, d9_1, d9_5, d9_9, d14_0, d14_1, d14_7, d14_14, d18_0, d18_1, d18_9, d18_18, d28_0, d28_1, d28_14, d28_28, d38_0, d38_1, d38_19, d38_38
FROM {db_name}.{table_name};
====
---- DATASET
-- Small table with decimal values. This is used for
-- testing min-max filters with decimal types on kudu tables
functional
---- BASE_TABLE_NAME
decimal_rtf_tiny_tbl
---- COLUMNS
d5_0 DECIMAL(5, 0)
d5_1 DECIMAL(5, 1)
d5_3 DECIMAL(5, 3)
d5_5 DECIMAL(5, 5)
d9_0 DECIMAL(9, 0)
d9_1 DECIMAL(9, 1)
d9_5 DECIMAL(9, 5)
d9_9 DECIMAL(9, 9)
d14_0 DECIMAL(14, 0)
d14_1 DECIMAL(14, 1)
d14_7 DECIMAL(14, 7)
d14_14 DECIMAL(14, 14)
d18_0 DECIMAL(18, 0)
d18_1 DECIMAL(18, 1)
d18_9 DECIMAL(18, 9)
d18_18 DECIMAL(18, 18)
d28_0 DECIMAL(28, 0)
d28_1 DECIMAL(28, 1)
d28_14 DECIMAL(28, 14)
d28_28 DECIMAL(28, 28)
d38_0 DECIMAL(38, 0)
d38_1 DECIMAL(38, 1)
d38_19 DECIMAL(38, 19)
d38_38 DECIMAL(38, 38)
---- PARTITION_COLUMNS
dpc DECIMAL(9, 0)
---- ALTER
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(dpc=1);
---- ROW_FORMAT
delimited fields terminated by ','
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/decimal_rtf_tiny_tbl.txt'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(dpc=1);
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} partition(dpc)
select * from functional.{table_name};
---- CREATE_KUDU
DROP TABLE IF EXISTS {db_name}{db_suffix}.{table_name};
CREATE TABLE {db_name}{db_suffix}.{table_name} (
d5_0 DECIMAL(5, 0),
d5_1 DECIMAL(5, 1),
d5_3 DECIMAL(5, 3),
d5_5 DECIMAL(5, 5),
d9_0 DECIMAL(9, 0),
d9_1 DECIMAL(9, 1),
d9_5 DECIMAL(9, 5),
d9_9 DECIMAL(9, 9),
d14_0 DECIMAL(14, 0),
d14_1 DECIMAL(14, 1),
d14_7 DECIMAL(14, 7),
d14_14 DECIMAL(14, 14),
d18_0 DECIMAL(18, 0),
d18_1 DECIMAL(18, 1),
d18_9 DECIMAL(18, 9),
d18_18 DECIMAL(18, 18),
d28_0 DECIMAL(28, 0),
d28_1 DECIMAL(28, 1),
d28_14 DECIMAL(28, 14),
d28_28 DECIMAL(28, 28),
d38_0 DECIMAL(38, 0),
d38_1 DECIMAL(38, 1),
d38_19 DECIMAL(38, 19),
d38_38 DECIMAL(38, 38),
-- Every column participates in the primary key.
PRIMARY KEY (d5_0, d5_1, d5_3, d5_5, d9_0, d9_1, d9_5, d9_9, d14_0, d14_1, d14_7, d14_14, d18_0, d18_1, d18_9, d18_18, d28_0, d28_1, d28_14, d28_28, d38_0, d38_1, d38_19, d38_38)
)
PARTITION BY HASH PARTITIONS 10
STORED AS KUDU;
---- DEPENDENT_LOAD_KUDU
INSERT into TABLE {db_name}{db_suffix}.{table_name}
SELECT d5_0, d5_1, d5_3, d5_5, d9_0, d9_1, d9_5, d9_9, d14_0, d14_1, d14_7, d14_14, d18_0, d18_1, d18_9, d18_18, d28_0, d28_1, d28_14, d28_28, d38_0, d38_1, d38_19, d38_38
FROM {db_name}.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
decimal_tiny
---- COLUMNS
c1 DECIMAL(10, 4)
c2 DECIMAL(15, 5)
c3 DECIMAL(1,1)
---- ROW_FORMAT
delimited fields terminated by ','
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/decimal-tiny.txt'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name}
select * from functional.{table_name};
---- CREATE_KUDU
DROP TABLE IF EXISTS {db_name}{db_suffix}.{table_name};
CREATE TABLE {db_name}{db_suffix}.{table_name} (
c1 DECIMAL(10, 4),
c2 DECIMAL(15, 5),
c3 DECIMAL(1, 1),
PRIMARY KEY (c1, c2, c3)
)
PARTITION BY HASH PARTITIONS 3
STORED AS KUDU;
---- DEPENDENT_LOAD_KUDU
INSERT into TABLE {db_name}{db_suffix}.{table_name}
SELECT c1, c2, c3
FROM {db_name}.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
parent_table
---- CREATE
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
id INT, year string, primary key(id, year) DISABLE NOVALIDATE RELY)
row format delimited fields terminated by ','
LOCATION '/test-warehouse/{table_name}';
---- ROW_FORMAT
delimited fields terminated by ','
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/parent_table.txt'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
parent_table_2
---- CREATE
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
a INT, primary key(a) DISABLE NOVALIDATE RELY)
row format delimited fields terminated by ','
LOCATION '/test-warehouse/{table_name}';
---- ROW_FORMAT
delimited fields terminated by ','
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/parent_table_2.txt'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
child_table
---- CREATE
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
seq int, id int, year string, a int, primary key(seq) DISABLE NOVALIDATE RELY, foreign key
(id, year) references {db_name}{db_suffix}.parent_table(id, year) DISABLE NOVALIDATE
RELY, foreign key(a) references {db_name}{db_suffix}.parent_table_2(a) DISABLE
NOVALIDATE RELY)
row format delimited fields terminated by ','
LOCATION '/test-warehouse/{table_name}';
---- ROW_FORMAT
delimited fields terminated by ','
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/child_table.txt'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
chars_tiny
---- COLUMNS
cs CHAR(5)
cl CHAR(140)
vc VARCHAR(32)
---- ROW_FORMAT
delimited fields terminated by ','
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/chars-tiny.txt'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name}
select * from functional.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
widetable_250_cols
---- COLUMNS
`${IMPALA_HOME}/testdata/common/widetable.py --get_columns -n 250
---- ROW_FORMAT
delimited fields terminated by ',' escaped by '\\'
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name}
select * from functional.{table_name};
---- LOAD
`${IMPALA_HOME}/testdata/common/widetable.py --create_data -n 250 -o /tmp/widetable_data.csv
====
---- DATASET
functional
---- BASE_TABLE_NAME
widetable_500_cols
---- COLUMNS
`${IMPALA_HOME}/testdata/common/widetable.py --get_columns -n 500
---- ROW_FORMAT
delimited fields terminated by ',' escaped by '\\'
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name}
select * from functional.{table_name};
---- LOAD
`${IMPALA_HOME}/testdata/common/widetable.py --create_data -n 500 -o /tmp/widetable_data.csv
====
---- DATASET
functional
---- BASE_TABLE_NAME
widetable_1000_cols
---- COLUMNS
`${IMPALA_HOME}/testdata/common/widetable.py --get_columns -n 1000
---- ROW_FORMAT
delimited fields terminated by ',' escaped by '\\'
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name}
select * from functional.{table_name};
---- LOAD
`${IMPALA_HOME}/testdata/common/widetable.py --create_data -n 1000 -o /tmp/widetable_data.csv
====
---- DATASET
functional
---- BASE_TABLE_NAME
avro_decimal_tbl
---- COLUMNS
name STRING
value DECIMAL(5,2)
---- DEPENDENT_LOAD_HIVE
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/avro_decimal_tbl.avro'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
no_avro_schema
---- CREATE_HIVE
-- Avro schema is inferred from the column definitions (IMPALA-1136)
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
id int,
bool_col boolean,
tinyint_col tinyint,
smallint_col smallint,
int_col int,
bigint_col bigint,
float_col float,
double_col double,
date_string_col string,
string_col string,
timestamp_col string)
PARTITIONED BY (year int, month int)
STORED AS AVRO
LOCATION '/test-warehouse/alltypes_avro_snap';
---- ALTER
-- The second partition is added twice because there seems to be a Hive/beeline
-- bug where the last alter is not executed properly.
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION (year=2009,month=9);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION (year=2010,month=10);
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION (year=2010,month=10);
====
---- DATASET
functional
---- BASE_TABLE_NAME
table_no_newline
---- CREATE
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
id INT, col_1 BOOLEAN, col_2 DOUBLE, col_3 TIMESTAMP)
row format delimited fields terminated by ','
LOCATION '/test-warehouse/{table_name}';
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/table_no_newline.csv'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
table_no_newline_part
---- CREATE
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
id INT, col_1 BOOLEAN, col_2 DOUBLE, col_3 TIMESTAMP)
partitioned by (year INT, month INT)
row format delimited fields terminated by ','
LOCATION '/test-warehouse/{table_name}';
ALTER TABLE {db_name}{db_suffix}.{table_name} ADD IF NOT EXISTS PARTITION (year=2015, month=3);
ALTER TABLE {db_name}{db_suffix}.{table_name} ADD IF NOT EXISTS PARTITION (year=2010, month=3);
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/table_no_newline.csv'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2010, month=3);
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/table_missing_columns.csv'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(year=2015, month=3);
====
---- DATASET
functional
---- BASE_TABLE_NAME
insert_only_transactional_table
---- HIVE_MAJOR_VERSION
3
---- CREATE_HIVE
---- COLUMNS
col1 int
---- TABLE_PROPERTIES
transactional=true
transactional_properties=insert_only
---- LOAD
-- TODO(todd) we need an empty load section with a comment in it here.
-- This works around some "logic" in generate-schema-statements.py that
-- says that, if a table has no LOAD section, it shouldn't be in non-text
-- formats.
====
---- DATASET
functional
---- BASE_TABLE_NAME
full_transactional_table
---- HIVE_MAJOR_VERSION
3
---- CREATE_HIVE
-- Full ACID transactional ORC table (no insert_only property); requires
-- Hive 3 per the HIVE_MAJOR_VERSION section above, so it is created via Hive.
CREATE TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
col1 int)
STORED AS ORC
TBLPROPERTIES('transactional'='true');
====
---- DATASET
functional
---- BASE_TABLE_NAME
insert_only_transactional_bucketed_table
---- HIVE_MAJOR_VERSION
3
---- CREATE_HIVE
-- Insert-only ACID table that is additionally bucketed (CLUSTERED BY);
-- requires Hive 3 per the HIVE_MAJOR_VERSION section above.
CREATE TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
col1 int, col2 int)
CLUSTERED BY (col1) INTO 5 BUCKETS
STORED AS ORC
TBLPROPERTIES('transactional'='true', 'transactional_properties'='insert_only');
====
---- DATASET
functional
---- BASE_TABLE_NAME
bucketed_ext_table
---- CREATE_HIVE
-- External bucketed (CLUSTERED BY) table; note the LOCATION concatenates
-- {db_name}{db_suffix}{table_name} with no separator, giving each
-- database/format combination its own directory under /test-warehouse.
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
col1 int, col2 int)
CLUSTERED BY (col1) INTO 5 BUCKETS
STORED AS {file_format}
LOCATION '/test-warehouse/{db_name}{db_suffix}{table_name}';
====
---- DATASET
functional
---- BASE_TABLE_NAME
bucketed_table
---- CREATE_HIVE
CREATE TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
col1 int, col2 int)
CLUSTERED BY (col1) INTO 5 BUCKETS
STORED AS {file_format};
---- LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name}
SELECT id, int_col from functional.alltypes;
---- DEPENDENT_LOAD_HIVE
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name}
SELECT * from functional.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
materialized_view
---- HIVE_MAJOR_VERSION
3
---- CREATE_HIVE
-- The create materialized view command is moved down so that the database's
-- managed directory has been created. Otherwise the command would fail. This
-- is a bug in Hive.
CREATE MATERIALIZED VIEW IF NOT EXISTS {db_name}{db_suffix}.{table_name}
AS SELECT * FROM {db_name}{db_suffix}.insert_only_transactional_table;
====
---- DATASET
functional
---- BASE_TABLE_NAME
uncomp_src_alltypes
---- CREATE_HIVE
CREATE TABLE {db_name}{db_suffix}.{table_name} LIKE functional.alltypes STORED AS ORC;
---- DEPENDENT_LOAD_HIVE
SET orc.compress=NONE;
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} PARTITION (year, month)
SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, float_col, double_col, date_string_col, string_col, timestamp_col, year, month
FROM functional.alltypes;
====
---- DATASET
functional
---- BASE_TABLE_NAME
uncomp_src_decimal_tbl
---- CREATE_HIVE
CREATE TABLE {db_name}{db_suffix}.{table_name} LIKE functional.decimal_tbl STORED AS ORC;
---- DEPENDENT_LOAD_HIVE
SET orc.compress=NONE;
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} PARTITION (d6)
SELECT d1, d2, d3, d4, d5, d6 FROM functional.decimal_tbl;
====
---- DATASET
functional
---- BASE_TABLE_NAME
testescape_16_lf
---- CREATE
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
col string)
row format delimited fields terminated by ',' escaped by '\\'
LOCATION '/test-warehouse/{table_name}';
---- LOAD
`${IMPALA_HOME}/testdata/common/text_delims_table.py --table_dir '/tmp/testescape_16_lf' --file_len 16 --only_newline
====
---- DATASET
functional
---- BASE_TABLE_NAME
testescape_16_crlf
---- CREATE
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
col string)
row format delimited fields terminated by ',' escaped by '\\'
LOCATION '/test-warehouse/{table_name}';
---- LOAD
`${IMPALA_HOME}/testdata/common/text_delims_table.py --table_dir '/tmp/testescape_16_crlf' --file_len 16
====
---- DATASET
functional
---- BASE_TABLE_NAME
testescape_17_lf
---- CREATE
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
col string)
row format delimited fields terminated by ',' escaped by '\\'
LOCATION '/test-warehouse/{table_name}';
---- LOAD
`${IMPALA_HOME}/testdata/common/text_delims_table.py --table_dir '/tmp/testescape_17_lf' --file_len 17 --only_newline
====
---- DATASET
functional
---- BASE_TABLE_NAME
testescape_17_crlf
---- CREATE
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
col string)
row format delimited fields terminated by ',' escaped by '\\'
LOCATION '/test-warehouse/{table_name}';
---- LOAD
`${IMPALA_HOME}/testdata/common/text_delims_table.py --table_dir '/tmp/testescape_17_crlf' --file_len 17
====
---- DATASET
functional
---- BASE_TABLE_NAME
testescape_32_lf
---- CREATE
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
col string)
row format delimited fields terminated by ',' escaped by '\\'
LOCATION '/test-warehouse/{table_name}';
---- LOAD
`${IMPALA_HOME}/testdata/common/text_delims_table.py --table_dir '/tmp/testescape_32_lf' --file_len 32 --only_newline
====
---- DATASET
functional
---- BASE_TABLE_NAME
testescape_32_crlf
---- CREATE
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
col string)
row format delimited fields terminated by ',' escaped by '\\'
LOCATION '/test-warehouse/{table_name}';
---- LOAD
`${IMPALA_HOME}/testdata/common/text_delims_table.py --table_dir '/tmp/testescape_32_crlf' --file_len 32
====
---- DATASET
functional
---- BASE_TABLE_NAME
alltimezones
---- CREATE
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
timezone STRING, utctime TIMESTAMP, localtime TIMESTAMP)
row format delimited fields terminated by ','
LOCATION '/test-warehouse/{table_name}';
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/timezoneverification.csv'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
avro_unicode_nulls
---- CREATE_HIVE
create external table if not exists {db_name}{db_suffix}.{table_name} like {db_name}{db_suffix}.liketbl stored as avro LOCATION '{hdfs_location}';
---- DEPENDENT_LOAD_HIVE
LOAD DATA LOCAL INPATH '{impala_home}/testdata/avro_null_char/000000_0'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
-- IMPALA-1881: Maximize data locality when scanning Parquet files with multiple row groups.
functional
---- BASE_TABLE_NAME
lineitem_multiblock
---- COLUMNS
L_ORDERKEY BIGINT
L_PARTKEY BIGINT
L_SUPPKEY BIGINT
L_LINENUMBER INT
L_QUANTITY DECIMAL(12,2)
L_EXTENDEDPRICE DECIMAL(12,2)
L_DISCOUNT DECIMAL(12,2)
L_TAX DECIMAL(12,2)
L_RETURNFLAG STRING
L_LINESTATUS STRING
L_SHIPDATE STRING
L_COMMITDATE STRING
L_RECEIPTDATE STRING
L_SHIPINSTRUCT STRING
L_SHIPMODE STRING
L_COMMENT STRING
====
---- DATASET
-- IMPALA-2466: Add more tests to the HDFS Parquet scanner
functional
---- BASE_TABLE_NAME
lineitem_sixblocks
---- COLUMNS
L_ORDERKEY BIGINT
L_PARTKEY BIGINT
L_SUPPKEY BIGINT
L_LINENUMBER INT
L_QUANTITY DECIMAL(12,2)
L_EXTENDEDPRICE DECIMAL(12,2)
L_DISCOUNT DECIMAL(12,2)
L_TAX DECIMAL(12,2)
L_RETURNFLAG STRING
L_LINESTATUS STRING
L_SHIPDATE STRING
L_COMMITDATE STRING
L_RECEIPTDATE STRING
L_SHIPINSTRUCT STRING
L_SHIPMODE STRING
L_COMMENT STRING
====
---- DATASET
-- IMPALA-2466: Add more tests to the HDFS Parquet scanner (this has only one row group)
functional
---- BASE_TABLE_NAME
lineitem_multiblock_one_row_group
---- COLUMNS
L_ORDERKEY BIGINT
L_PARTKEY BIGINT
L_SUPPKEY BIGINT
L_LINENUMBER INT
L_QUANTITY DECIMAL(12,2)
L_EXTENDEDPRICE DECIMAL(12,2)
L_DISCOUNT DECIMAL(12,2)
L_TAX DECIMAL(12,2)
L_RETURNFLAG STRING
L_LINESTATUS STRING
L_SHIPDATE STRING
L_COMMITDATE STRING
L_RECEIPTDATE STRING
L_SHIPINSTRUCT STRING
L_SHIPMODE STRING
L_COMMENT STRING
====
---- DATASET
-- IMPALA-4933: tests nested collections stored in multiple row-groups.
functional
---- BASE_TABLE_NAME
customer_multiblock
---- COLUMNS
C_CUSTKEY BIGINT
C_NAME STRING
C_ADDRESS STRING
C_NATIONKEY SMALLINT
C_PHONE STRING
C_ACCTBAL DECIMAL(12, 2)
C_MKTSEGMENT STRING
C_COMMENT STRING
C_ORDERS ARRAY<STRUCT<O_ORDERKEY: BIGINT, O_ORDERSTATUS: STRING, O_TOTALPRICE: DECIMAL(12, 2), O_ORDERDATE: STRING, O_ORDERPRIORITY: STRING, O_CLERK: STRING, O_SHIPPRIORITY: INT, O_COMMENT: STRING, O_LINEITEMS: ARRAY<STRUCT<L_PARTKEY: BIGINT, L_SUPPKEY: BIGINT, L_LINENUMBER: INT, L_QUANTITY: DECIMAL(12, 2), L_EXTENDEDPRICE: DECIMAL(12, 2), L_DISCOUNT: DECIMAL(12, 2), L_TAX: DECIMAL(12, 2), L_RETURNFLAG: STRING, L_LINESTATUS: STRING, L_SHIPDATE: STRING, L_COMMITDATE: STRING, L_RECEIPTDATE: STRING, L_SHIPINSTRUCT: STRING, L_SHIPMODE: STRING, L_COMMENT: STRING>>>>
---- DEPENDENT_LOAD_HIVE
LOAD DATA LOCAL INPATH '{impala_home}/testdata/CustomerMultiBlock/customer_multiblock.parquet'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
bzip2_tbl
---- COLUMNS
col string
---- DEPENDENT_LOAD_HIVE
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/data-bzip2.bz2'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
large_bzip2_tbl
---- COLUMNS
col string
---- DEPENDENT_LOAD_HIVE
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/large_bzip2.bz2'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
multistream_bzip2_tbl
---- COLUMNS
col string
---- DEPENDENT_LOAD_HIVE
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/data-pbzip2.bz2'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
large_multistream_bzip2_tbl
---- COLUMNS
col string
---- DEPENDENT_LOAD_HIVE
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/large_pbzip2.bz2'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
table_with_header
---- COLUMNS
c1 int
c2 double
---- ROW_FORMAT
delimited fields terminated by ',' escaped by '\\'
---- ALTER
ALTER TABLE {table_name} SET TBLPROPERTIES('skip.header.line.count'='1');
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/table_with_header.csv'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
---- DEPENDENT_LOAD_HIVE
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/table_with_header.gz'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
table_with_header_2
---- COLUMNS
c1 int
c2 double
---- ROW_FORMAT
delimited fields terminated by ',' escaped by '\\'
---- ALTER
ALTER TABLE {table_name} SET TBLPROPERTIES('skip.header.line.count'='2');
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/table_with_header_2.csv'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
---- DEPENDENT_LOAD_HIVE
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/table_with_header_2.gz'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
table_with_header_insert
---- CREATE
CREATE TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (i1 integer)
STORED AS {file_format}
TBLPROPERTIES('skip.header.line.count'='2');
====
---- DATASET
functional
---- BASE_TABLE_NAME
strings_with_quotes
---- COLUMNS
s string
i int
---- ROW_FORMAT
delimited fields terminated by ',' escaped by '\\'
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/strings_with_quotes.csv'
OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name};
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name}
SELECT s, i
FROM {db_name}.{table_name};
---- CREATE_KUDU
DROP TABLE IF EXISTS {db_name}{db_suffix}.{table_name};
CREATE TABLE {db_name}{db_suffix}.{table_name} (
s string PRIMARY KEY,
i int
)
PARTITION BY HASH (s) PARTITIONS 3 STORED AS KUDU;
---- DEPENDENT_LOAD_KUDU
INSERT into TABLE {db_name}{db_suffix}.{table_name}
SELECT s, i
FROM {db_name}.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
part_strings_with_quotes
---- COLUMNS
i int
---- PARTITION_COLUMNS
p string
---- LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} PARTITION (p="\"") VALUES (1);
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} PARTITION (p='\'') VALUES (2);
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} PARTITION (p="\\\"") VALUES (3);
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} PARTITION (p='\\\'') VALUES (4);
====
---- DATASET
functional
---- BASE_TABLE_NAME
manynulls
---- COLUMNS
id int
nullcol int
---- ALTER
-- Ensure the nulls are clustered together.
ALTER TABLE {table_name} SORT BY (id);
---- CREATE_KUDU
DROP VIEW IF EXISTS {db_name}{db_suffix}.{table_name};
DROP TABLE IF EXISTS {db_name}{db_suffix}.{table_name}_idx;
CREATE TABLE {db_name}{db_suffix}.{table_name}_idx (
kudu_idx BIGINT PRIMARY KEY,
id INT,
nullcol INT NULL
)
PARTITION BY HASH (kudu_idx) PARTITIONS 3 STORED AS KUDU;
CREATE VIEW {db_name}{db_suffix}.{table_name} AS
SELECT id, nullcol
FROM {db_name}{db_suffix}.{table_name}_idx;
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name}
SELECT id, nullcol
FROM {db_name}.{table_name};
---- DEPENDENT_LOAD_KUDU
INSERT into TABLE {db_name}{db_suffix}.{table_name}_idx
SELECT row_number() over (order by id),
id, nullcol
FROM {db_name}.{table_name};
---- LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name}
SELECT id, if((id div 500) % 2 = 0, NULL, id) as nullcol
FROM functional.alltypesagg;
====
---- DATASET
functional
---- BASE_TABLE_NAME
chars_medium
---- COLUMNS
id int
date_char_col char(8)
char_col char(3)
date_varchar_col varchar(8)
varchar_col varchar(3)
---- DEPENDENT_LOAD
insert overwrite table {db_name}{db_suffix}.{table_name}
select id, date_char_col, char_col, date_varchar_col, varchar_col
from {db_name}.{table_name};
---- LOAD
insert overwrite table {db_name}{db_suffix}.{table_name}
select id, date_string_col, case when id % 3 in (0, 1) then string_col end, date_string_col, case when id % 3 = 0 then string_col end
from functional.alltypesagg;
====
---- DATASET
functional
---- BASE_TABLE_NAME
date_tbl
---- PARTITION_COLUMNS
date_part DATE
---- COLUMNS
id_col INT
date_col DATE
---- ALTER
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(date_part='0001-01-01');
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(date_part='1399-06-27');
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(date_part='2017-11-27');
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(date_part='9999-12-31');
---- ROW_FORMAT
delimited fields terminated by ','
---- HBASE_REGION_SPLITS
'1','3','5','7','9'
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/date_tbl/0000.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(date_part='0001-01-01');
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/date_tbl/0001.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(date_part='1399-06-27');
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/date_tbl/0002.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(date_part='2017-11-27');
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/date_tbl/0003.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(date_part='9999-12-31');
---- DEPENDENT_LOAD
insert overwrite table {db_name}{db_suffix}.{table_name} partition(date_part)
select id_col, date_col, date_part from functional.{table_name};
---- CREATE_KUDU
-- Can't create partitions with date_part since Kudu doesn't support
-- "partition by" with a non-key column.
DROP TABLE IF EXISTS {db_name}{db_suffix}.{table_name};
CREATE TABLE {db_name}{db_suffix}.{table_name} (
id_col INT PRIMARY KEY,
date_col DATE NULL,
date_part DATE NOT NULL
)
PARTITION BY HASH (id_col) PARTITIONS 3 STORED AS KUDU;
---- DEPENDENT_LOAD_KUDU
INSERT INTO TABLE {db_name}{db_suffix}.{table_name}
SELECT id_col, date_col, date_part FROM {db_name}.{table_name};
====
---- DATASET
functional
---- BASE_TABLE_NAME
date_tbl_error
---- CREATE
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (
id_col int,
date_col date)
partitioned by (date_part date)
row format delimited fields terminated by ',' escaped by '\\'
stored as {file_format}
LOCATION '{hdfs_location}';
USE {db_name}{db_suffix};
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(date_part='0001-01-01');
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(date_part='1399-06-27');
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(date_part='2017-11-27');
ALTER TABLE {table_name} ADD IF NOT EXISTS PARTITION(date_part='9999-12-31');
-- Create external temp table with desired file format with same data file location
-- Tmp tables must not specify an escape character; we don't want any
-- data transformation to happen when inserting into tmp tables.
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name}_tmp (
id_col STRING,
date_col STRING)
PARTITIONED BY (date_part DATE)
ROW FORMAT DELIMITED
FIELDS TERMINATED BY ','
STORED AS {file_format}
LOCATION '{hdfs_location}';
-- Make metastore aware of the partition directories for the temp table
ALTER TABLE {table_name}_tmp ADD IF NOT EXISTS PARTITION(date_part='0001-01-01');
ALTER TABLE {table_name}_tmp ADD IF NOT EXISTS PARTITION(date_part='1399-06-27');
ALTER TABLE {table_name}_tmp ADD IF NOT EXISTS PARTITION(date_part='2017-11-27');
ALTER TABLE {table_name}_tmp ADD IF NOT EXISTS PARTITION(date_part='9999-12-31');
---- DEPENDENT_LOAD
USE {db_name}{db_suffix};
-- Step 4: Stream the data from tmp text table to desired format tmp table
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name}_tmp PARTITION (date_part)
SELECT * FROM {db_name}.{table_name}_tmp;
-- Cleanup the temp table
DROP TABLE IF EXISTS {db_name}{db_suffix}.{table_name}_tmp;
---- LOAD
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/date_tbl_error/0000.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(date_part='0001-01-01');
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/date_tbl_error/0001.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(date_part='1399-06-27');
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/date_tbl_error/0002.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(date_part='2017-11-27');
LOAD DATA LOCAL INPATH '{impala_home}/testdata/data/date_tbl_error/0003.txt' OVERWRITE INTO TABLE {db_name}{db_suffix}.{table_name} PARTITION(date_part='9999-12-31');
====
---- DATASET
functional
---- BASE_TABLE_NAME
insert_date_tbl
---- PARTITION_COLUMNS
date_part DATE
---- COLUMNS
id_col INT
date_col DATE
====
---- DATASET
functional
---- BASE_TABLE_NAME
hudi_partitioned
---- CREATE
CREATE TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name}
LIKE PARQUET '/test-warehouse/hudi_parquet/year=2015/month=03/day=16/5f541af5-ca07-4329-ad8c-40fa9b353f35-0_2-103-391_20200210090618.parquet'
PARTITIONED BY (year int, month int, day int)
STORED AS HUDIPARQUET
LOCATION '/test-warehouse/hudi_parquet';
ALTER TABLE {db_name}{db_suffix}.{table_name} RECOVER PARTITIONS;
---- DEPENDENT_LOAD
`hadoop fs -mkdir -p /test-warehouse/hudi_parquet && \
hadoop fs -put -f ${IMPALA_HOME}/testdata/data/hudi_parquet /test-warehouse/
====
---- DATASET
functional
---- BASE_TABLE_NAME
hudi_non_partitioned
---- CREATE
CREATE TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name}
LIKE PARQUET '/test-warehouse/hudi_parquet/year=2015/month=03/day=16/5f541af5-ca07-4329-ad8c-40fa9b353f35-0_2-103-391_20200210090618.parquet'
STORED AS HUDIPARQUET
LOCATION '/test-warehouse/hudi_parquet';
====
---- DATASET
functional
---- BASE_TABLE_NAME
hudi_as_parquet
---- CREATE
-- Points a plain Parquet table (STORED AS PARQUET, not HUDIPARQUET) at the
-- Hudi dataset's files, so Hudi-specific handling is bypassed on read.
CREATE TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name}
LIKE PARQUET '/test-warehouse/hudi_parquet/year=2015/month=03/day=16/5f541af5-ca07-4329-ad8c-40fa9b353f35-0_2-103-391_20200210090618.parquet'
STORED AS PARQUET
LOCATION '/test-warehouse/hudi_parquet';
====
---- DATASET
functional
---- BASE_TABLE_NAME
alltypes_date_partition
---- PARTITION_COLUMNS
date_col date
---- COLUMNS
id int COMMENT 'Add a comment'
bool_col boolean
tinyint_col tinyint
smallint_col smallint
int_col int
bigint_col bigint
float_col float
double_col double
string_col string
timestamp_col timestamp
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} PARTITION (date_col)
SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col,
float_col, double_col, string_col, timestamp_col,
case when id % 2 = 0 then cast(timestamp_col as date)
else cast(cast(timestamp_col as date) + interval 5 days as date) end date_col
FROM {db_name}{db_suffix}.alltypes where id < 500;
---- LOAD
SET hive.exec.dynamic.partition.mode=nonstrict;
SET hive.exec.dynamic.partition=true;
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} PARTITION (date_col)
SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col,
float_col, double_col, string_col, timestamp_col,
case when id % 2 = 0 then cast(timestamp_col as date)
else cast(cast(timestamp_col as date) + interval 5 days as date) end date_col
FROM {db_name}{db_suffix}.alltypes where id < 500;
====
---- DATASET
functional
---- BASE_TABLE_NAME
iceberg_partitioned
---- CREATE
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name}
STORED AS ICEBERG
LOCATION '/test-warehouse/iceberg_test/iceberg_partitioned'
TBLPROPERTIES('write.format.default'='parquet', 'iceberg.catalog'='hadoop.tables');
---- DEPENDENT_LOAD
`hadoop fs -mkdir -p /test-warehouse/iceberg_test && \
hadoop fs -put -f ${IMPALA_HOME}/testdata/data/iceberg_test/iceberg_partitioned /test-warehouse/iceberg_test/
====
---- DATASET
functional
---- BASE_TABLE_NAME
iceberg_non_partitioned
---- CREATE
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name}
STORED AS ICEBERG
LOCATION '/test-warehouse/iceberg_test/iceberg_non_partitioned'
TBLPROPERTIES('write.format.default'='parquet', 'iceberg.catalog'='hadoop.tables');
---- DEPENDENT_LOAD
`hadoop fs -mkdir -p /test-warehouse/iceberg_test && \
hadoop fs -put -f ${IMPALA_HOME}/testdata/data/iceberg_test/iceberg_non_partitioned /test-warehouse/iceberg_test/
====
---- DATASET
functional
---- BASE_TABLE_NAME
hadoop_catalog_test_external
---- CREATE
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name}
STORED AS ICEBERG
TBLPROPERTIES('write.format.default'='parquet', 'iceberg.catalog'='hadoop.catalog',
'iceberg.catalog_location'='/test-warehouse/iceberg_test/hadoop_catalog/hadoop_catalog_test',
'iceberg.table_identifier'='functional_parquet.hadoop_catalog_test');
---- DEPENDENT_LOAD
`hadoop fs -mkdir -p /test-warehouse/iceberg_test/hadoop_catalog && \
hadoop fs -put -f ${IMPALA_HOME}/testdata/data/iceberg_test/hadoop_catalog/hadoop_catalog_test /test-warehouse/iceberg_test/hadoop_catalog/
====
---- DATASET
functional
---- BASE_TABLE_NAME
iceberg_partitioned_orc_external
---- CREATE
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name}
STORED AS ICEBERG
TBLPROPERTIES('write.format.default'='orc',
'iceberg.catalog'='hadoop.catalog',
'iceberg.catalog_location'='/test-warehouse/iceberg_test/hadoop_catalog/iceberg_partitioned_orc',
'iceberg.table_identifier'='functional_parquet.iceberg_partitioned_orc');
---- DEPENDENT_LOAD
`hadoop fs -mkdir -p /test-warehouse/iceberg_test/hadoop_catalog && \
hadoop fs -put -f ${IMPALA_HOME}/testdata/data/iceberg_test/hadoop_catalog/iceberg_partitioned_orc /test-warehouse/iceberg_test/hadoop_catalog/
====
---- DATASET
functional
---- BASE_TABLE_NAME
complextypestbl_iceberg_orc
---- CREATE
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name}
STORED AS ICEBERG
TBLPROPERTIES('write.format.default'='orc', 'iceberg.catalog'='hadoop.catalog',
'iceberg.catalog_location'='/test-warehouse/iceberg_test/hadoop_catalog',
'iceberg.table_identifier'='ice.complextypestbl_iceberg_orc');
---- DEPENDENT_LOAD
`hadoop fs -mkdir -p /test-warehouse/iceberg_test/hadoop_catalog/ice && \
hadoop fs -put -f ${IMPALA_HOME}/testdata/data/iceberg_test/hadoop_catalog/ice/complextypestbl_iceberg_orc /test-warehouse/iceberg_test/hadoop_catalog/ice
====
---- DATASET
functional
---- BASE_TABLE_NAME
iceberg_resolution_test_external
---- CREATE
CREATE EXTERNAL TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name}
STORED AS ICEBERG
TBLPROPERTIES('write.format.default'='parquet', 'iceberg.catalog'='hadoop.catalog',
'iceberg.catalog_location'='/test-warehouse/iceberg_test/hadoop_catalog/iceberg_resolution_test',
'iceberg.table_identifier'='functional_parquet.iceberg_resolution_test');
---- DEPENDENT_LOAD
`hadoop fs -mkdir -p /test-warehouse/iceberg_test/hadoop_catalog && \
hadoop fs -put -f ${IMPALA_HOME}/testdata/data/iceberg_test/hadoop_catalog/iceberg_resolution_test /test-warehouse/iceberg_test/hadoop_catalog/
====
---- DATASET
functional
---- BASE_TABLE_NAME
iceberg_int_partitioned
---- CREATE
CREATE TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name} (i INT, j INT, k INT)
PARTITIONED BY SPEC (i, j)
STORED AS ICEBERG;
====
---- DATASET
functional
---- BASE_TABLE_NAME
iceberg_partition_transforms_zorder
---- CREATE
CREATE TABLE IF NOT EXISTS {db_name}{db_suffix}.{table_name}
(ts timestamp, s string, i int, j int)
PARTITIONED BY SPEC (year(ts), bucket(5, s))
SORT BY ZORDER (i, j)
STORED AS ICEBERG;
====
---- DATASET
functional
---- BASE_TABLE_NAME
alltypes_date_partition_2
---- PARTITION_COLUMNS
date_col date
---- COLUMNS
id int COMMENT 'Add a comment'
bool_col boolean
tinyint_col tinyint
smallint_col smallint
int_col int
bigint_col bigint
float_col float
double_col double
string_col string
timestamp_col timestamp
---- DEPENDENT_LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} PARTITION (date_col)
SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col,
float_col, double_col, string_col, timestamp_col,
cast(timestamp_col as date) date_col
FROM {db_name}{db_suffix}.alltypes where id < 500;
---- LOAD
SET hive.exec.dynamic.partition.mode=nonstrict;
SET hive.exec.dynamic.partition=true;
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} PARTITION (date_col)
SELECT id, bool_col, tinyint_col, smallint_col, int_col, bigint_col,
float_col, double_col, string_col, timestamp_col,
cast(timestamp_col as date) date_col
FROM {db_name}{db_suffix}.alltypes where id < 500;
====
---- DATASET
functional
---- BASE_TABLE_NAME
alltypes_dp_2_view_1
---- CREATE
DROP VIEW IF EXISTS {db_name}{db_suffix}.{table_name};
-- view which references a WHERE clause with hint
CREATE VIEW {db_name}{db_suffix}.{table_name}
AS SELECT * FROM {db_name}{db_suffix}.alltypes_date_partition_2 where [always_true] date_col = cast(timestamp_col as date);
---- LOAD
====
---- DATASET
functional
---- BASE_TABLE_NAME
alltypes_dp_2_view_2
---- CREATE
DROP VIEW IF EXISTS {db_name}{db_suffix}.{table_name};
-- view which references a table with hint and a WHERE clause with hint.
-- WHERE clause has a compound predicate.
CREATE VIEW {db_name}{db_suffix}.{table_name}
AS SELECT * FROM {db_name}{db_suffix}.alltypes_date_partition_2 [convert_limit_to_sample(5)]
where [always_true] date_col = cast(timestamp_col as date) and int_col in (select int_col from {db_name}{db_suffix}.alltypessmall);
---- LOAD
====
---- DATASET
functional
---- BASE_TABLE_NAME
utf8_str_tiny
---- COLUMNS
id int
name string
---- DEPENDENT_LOAD_HIVE
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name}
SELECT id, name FROM {db_name}.{table_name};
---- LOAD
INSERT OVERWRITE TABLE {db_name}{db_suffix}.{table_name} VALUES
(1, "张三"), (2, "李四"), (3, "王五"), (4, "李小龙"), (5, "Alice"),
(6, "陈Bob"), (7, "Бopиc"), (8, "Jörg"), (9, "ひなた"), (10, "서연");
====
|
<reponame>DanDude0/MilwaukeeMakerspaceApi
/*
SQLyog Community
MySQL - 10.3.18-MariaDB-0+deb10u1 : Database - access_control
*********************************************************************
*/
/*!40101 SET NAMES utf8 */;
/*!40101 SET SQL_MODE=''*/;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
CREATE DATABASE /*!32312 IF NOT EXISTS*/`access_control` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci */;
/*Table structure for table `attempt` */
CREATE TABLE `attempt` (
`attempt_id` int(11) NOT NULL AUTO_INCREMENT,
`keycode` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT '',
`member_id` int(11) NOT NULL,
`reader_id` int(11) NOT NULL,
`access_granted` tinyint(1) NOT NULL DEFAULT 0,
`login` tinyint(1) NOT NULL DEFAULT 0,
`logout` tinyint(1) NOT NULL DEFAULT 0,
`action` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT '',
`attempt_time` datetime NOT NULL,
PRIMARY KEY (`attempt_id`)
) ENGINE=MyISAM AUTO_INCREMENT=491889 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
/*Table structure for table `group` */
CREATE TABLE `group` (
`group_id` int(11) NOT NULL AUTO_INCREMENT,
`name` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
PRIMARY KEY (`group_id`)
) ENGINE=MyISAM AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
/*Table structure for table `group_member` */
CREATE TABLE `group_member` (
`group_id` int(11) NOT NULL,
`member_id` int(11) NOT NULL,
PRIMARY KEY (`member_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
/*Table structure for table `keycode` */
CREATE TABLE `keycode` (
`keycode_id` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`member_id` int(11) NOT NULL,
`updated` datetime NOT NULL
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
/*Table structure for table `member` */
CREATE TABLE `member` (
`member_id` int(11) NOT NULL,
`name` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`type` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`apricot_admin` tinyint(1) NOT NULL,
`joined` datetime NOT NULL,
`expires` datetime NOT NULL,
`updated` datetime NOT NULL,
PRIMARY KEY (`member_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
/*Table structure for table `network_event` */
CREATE TABLE `network_event` (
`network_event_id` int(11) NOT NULL AUTO_INCREMENT,
`reader_id` int(11) NOT NULL,
`online` tinyint(1) NOT NULL,
`event_time` datetime NOT NULL,
PRIMARY KEY (`network_event_id`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*Table structure for table `reader` */
CREATE TABLE `reader` (
`reader_id` int(11) NOT NULL,
`name` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`timeout` int(11) NOT NULL DEFAULT 10,
`enabled` tinyint(1) NOT NULL DEFAULT 1,
`group_id` int(11) NOT NULL DEFAULT 0,
`address` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT '',
`version` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT '',
`initialized` datetime NOT NULL DEFAULT current_timestamp() ON UPDATE current_timestamp(),
`settings` varchar(8192) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT '',
`status` varchar(4096) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT '',
PRIMARY KEY (`reader_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
|
CREATE PROCEDURE [sp_ins_UserRole] (
@UserProfileID int,
@RoleID int
)
AS
INSERT INTO [dbo].[UserRole] (
[UserProfileID],
[RoleID]
)
VALUES (
@UserProfileID,
@RoleID
) |
<reponame>dmnpignaud/dbt-external-tables<gh_stars>10-100
{% macro spark__refresh_external_table(source_node) %}
{% set refresh %}
refresh table {{source(source_node.source_name, source_node.name)}}
{% endset %}
{% do return([refresh]) %}
{% endmacro %}
|
<filename>src/test/resources/mysql-script.sql
-- --------------------------------------------------------
-- 主机: 127.0.0.1
-- 服务器版本: 8.0.20 - MySQL Community Server - GPL
-- 服务器操作系统: Win64
-- HeidiSQL 版本: 11.0.0.5919
-- --------------------------------------------------------
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET NAMES utf8 */;
/*!50503 SET NAMES utf8mb4 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
-- 导出 smartdb 的数据库结构
DROP DATABASE IF EXISTS `smartdb`;
CREATE DATABASE IF NOT EXISTS `smartdb` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci */ /*!80016 DEFAULT ENCRYPTION='N' */;
USE `smartdb`;
-- 导出 表 smartdb.t_delete_sql_builder 结构
DROP TABLE IF EXISTS `t_delete_sql_builder`;
CREATE TABLE IF NOT EXISTS `t_delete_sql_builder` (
`id` bigint unsigned NOT NULL AUTO_INCREMENT,
`uuid` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`create_time` datetime DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci ROW_FORMAT=DYNAMIC;
-- 正在导出表 smartdb.t_delete_sql_builder 的数据:~0 rows (大约)
DELETE FROM `t_delete_sql_builder`;
/*!40000 ALTER TABLE `t_delete_sql_builder` DISABLE KEYS */;
/*!40000 ALTER TABLE `t_delete_sql_builder` ENABLE KEYS */;
-- 导出 表 smartdb.t_enum 结构
DROP TABLE IF EXISTS `t_enum`;
CREATE TABLE IF NOT EXISTS `t_enum` (
`id` int NOT NULL AUTO_INCREMENT,
`user_name` varchar(50) NOT NULL DEFAULT '0',
`sex` varchar(50) NOT NULL DEFAULT '0',
`level` int NOT NULL DEFAULT '0',
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8;
-- 正在导出表 smartdb.t_enum 的数据:~0 rows (大约)
DELETE FROM `t_enum`;
/*!40000 ALTER TABLE `t_enum` DISABLE KEYS */;
INSERT INTO `t_enum` (`id`, `user_name`, `sex`, `level`) VALUES
(1, 'world', 'GIRL', 2);
/*!40000 ALTER TABLE `t_enum` ENABLE KEYS */;
-- 导出 表 smartdb.t_insert_sql_builder 结构
DROP TABLE IF EXISTS `t_insert_sql_builder`;
CREATE TABLE IF NOT EXISTS `t_insert_sql_builder` (
`id` bigint NOT NULL,
`uuid` varchar(100) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`create_time` datetime DEFAULT NULL,
`key` varchar(50) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`balance` decimal(10,3) DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci ROW_FORMAT=DYNAMIC;
-- 正在导出表 smartdb.t_insert_sql_builder 的数据:~1 rows (大约)
DELETE FROM `t_insert_sql_builder`;
/*!40000 ALTER TABLE `t_insert_sql_builder` DISABLE KEYS */;
INSERT INTO `t_insert_sql_builder` (`id`, `uuid`, `create_time`, `key`, `balance`) VALUES
(2, NULL, '2020-09-08 15:09:38', NULL, NULL);
/*!40000 ALTER TABLE `t_insert_sql_builder` ENABLE KEYS */;
-- 导出 表 smartdb.t_replace_sql_builder 结构
DROP TABLE IF EXISTS `t_replace_sql_builder`;
CREATE TABLE IF NOT EXISTS `t_replace_sql_builder` (
`id` int unsigned NOT NULL,
`name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`create_time` datetime DEFAULT NULL,
`auto_increase_id` int NOT NULL AUTO_INCREMENT,
PRIMARY KEY (`auto_increase_id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci ROW_FORMAT=DYNAMIC;
-- 正在导出表 smartdb.t_replace_sql_builder 的数据:~1 rows (大约)
DELETE FROM `t_replace_sql_builder`;
/*!40000 ALTER TABLE `t_replace_sql_builder` DISABLE KEYS */;
INSERT INTO `t_replace_sql_builder` (`id`, `name`, `create_time`, `auto_increase_id`) VALUES
(1, 'testReplaceBuilder-replace', '2019-07-11 16:28:15', 2);
/*!40000 ALTER TABLE `t_replace_sql_builder` ENABLE KEYS */;
-- 导出 表 smartdb.t_select_sql_builder 结构
DROP TABLE IF EXISTS `t_select_sql_builder`;
CREATE TABLE IF NOT EXISTS `t_select_sql_builder` (
`id` bigint NOT NULL,
`login_name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`nick_name` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`city` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`age` int DEFAULT NULL,
`create_time` datetime DEFAULT NULL,
`update_time` datetime DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci ROW_FORMAT=DYNAMIC;
-- 正在导出表 smartdb.t_select_sql_builder 的数据:~1 rows (大约)
DELETE FROM `t_select_sql_builder`;
/*!40000 ALTER TABLE `t_select_sql_builder` DISABLE KEYS */;
INSERT INTO `t_select_sql_builder` (`id`, `login_name`, `name`, `nick_name`, `city`, `age`, `create_time`, `update_time`) VALUES
(1, NULL, NULL, 'login-name 1', 'city1', 2, '2018-08-11 00:00:00', '2018-08-11 00:00:00'),
(2, NULL, NULL, 'login-name 2', 'city2', 1, '2018-08-12 00:00:00', '2018-08-12 00:00:00'),
(3, NULL, NULL, 'login-name 3', 'city3', 1, '2018-08-13 00:00:00', '2018-08-13 00:00:00'),
(4, NULL, NULL, 'login-name 4', 'city4', 3, '2018-08-14 00:00:00', '2018-08-14 00:00:00'),
(5, NULL, NULL, 'login-name 5', 'city5', 3, '2018-08-15 00:00:00', '2018-08-15 00:00:00'),
(6, NULL, NULL, 'login-name 6', 'city6', 3, '2018-08-16 00:00:00', '2018-08-16 00:00:00'),
(7, NULL, NULL, 'login-name 7', 'city7', 3, '2018-08-17 00:00:00', '2018-08-17 00:00:00'),
(8, NULL, NULL, 'login-name 8', 'city8', 1, '2018-08-18 00:00:00', '2018-08-18 00:00:00'),
(9, NULL, NULL, 'login-name 9', 'city9', 2, '2018-08-19 00:00:00', '2018-08-19 00:00:00'),
(10, NULL, NULL, 'login-name 10', 'city10', 2, '2018-08-20 00:00:00', '2018-08-20 00:00:00'),
(11, NULL, NULL, 'login-name 11', 'city11', 3, '2018-08-21 00:00:00', '2018-08-21 00:00:00'),
(12, NULL, NULL, 'login-name 12', 'city12', 1, '2018-08-22 00:00:00', '2018-08-22 00:00:00'),
(13, NULL, NULL, 'login-name 13', 'city13', 1, '2018-08-23 00:00:00', '2018-08-23 00:00:00'),
(14, NULL, NULL, 'login-name 14', 'city14', 1, '2018-08-24 00:00:00', '2018-08-24 00:00:00'),
(15, NULL, NULL, 'login-name 15', 'city15', 1, '2018-08-25 00:00:00', '2018-08-25 00:00:00');
/*!40000 ALTER TABLE `t_select_sql_builder` ENABLE KEYS */;
-- 导出 表 smartdb.t_transaction 结构
DROP TABLE IF EXISTS `t_transaction`;
CREATE TABLE IF NOT EXISTS `t_transaction` (
`id` int NOT NULL AUTO_INCREMENT,
`name` varchar(50) NOT NULL DEFAULT '0',
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=9 DEFAULT CHARSET=utf8;
-- 正在导出表 smartdb.t_transaction 的数据:~0 rows (大约)
DELETE FROM `t_transaction`;
/*!40000 ALTER TABLE `t_transaction` DISABLE KEYS */;
/*!40000 ALTER TABLE `t_transaction` ENABLE KEYS */;
-- 导出 表 smartdb.t_update_sql_builder 结构
DROP TABLE IF EXISTS `t_update_sql_builder`;
CREATE TABLE IF NOT EXISTS `t_update_sql_builder` (
`name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`tag` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`create_time` datetime DEFAULT NULL,
`id` bigint unsigned NOT NULL AUTO_INCREMENT,
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=588 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci ROW_FORMAT=DYNAMIC;
-- 正在导出表 smartdb.t_update_sql_builder 的数据:~1 rows (大约)
DELETE FROM `t_update_sql_builder`;
/*!40000 ALTER TABLE `t_update_sql_builder` DISABLE KEYS */;
INSERT INTO `t_update_sql_builder` (`name`, `tag`, `create_time`, `id`) VALUES
('name 0', 'tag0', '2020-09-08 15:11:32', 578),
('name 1', 'tag1', '2020-09-08 15:11:32', 579),
('hi', 'update-tag', '2020-09-08 15:11:32', 580),
('name 3', 'tag3', '2020-09-08 15:11:32', 581),
('name 4', 'tag4', '2020-09-08 15:11:32', 582),
('name 5', 'tag5', '2020-09-08 15:11:32', 583),
('name 6', 'tag6', '2020-09-08 15:11:32', 584),
('name 7', 'tag7', '2020-09-08 15:11:32', 585),
('name 8', 'tag8', '2020-09-08 15:11:32', 586),
('name 9', 'tag9', '2020-09-08 15:11:32', 587);
/*!40000 ALTER TABLE `t_update_sql_builder` ENABLE KEYS */;
-- 导出 表 smartdb.t_user 结构
DROP TABLE IF EXISTS `t_user`;
CREATE TABLE IF NOT EXISTS `t_user` (
`id` int NOT NULL AUTO_INCREMENT,
`user_name` varchar(50) DEFAULT NULL,
`create_time` datetime DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- 正在导出表 smartdb.t_user 的数据:~0 rows (大约)
DELETE FROM `t_user`;
/*!40000 ALTER TABLE `t_user` DISABLE KEYS */;
/*!40000 ALTER TABLE `t_user` ENABLE KEYS */;
/*!40101 SET SQL_MODE=IFNULL(@OLD_SQL_MODE, '') */;
/*!40014 SET FOREIGN_KEY_CHECKS=IF(@OLD_FOREIGN_KEY_CHECKS IS NULL, 1, @OLD_FOREIGN_KEY_CHECKS) */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
|
<reponame>opitzconsulting/orcas<gh_stars>10-100
create table VARRAY_TABLE_NEW ( ID NUMERIC, COL1 T_VARCHARARRAY ) varray COL1 store as securefile lob ;
alter table VARRAY_TABLE_ADD add COL1 T_VARCHARARRAY varray COL1 store as securefile lob ( deduplicate compress high );
|
<filename>spring-data-jdbc/src/test/resources/org.springframework.data.jdbc.repository/JdbcRepositoryWithListsIntegrationTests-oracle.sql
DROP TABLE ELEMENT;
DROP TABLE DUMMY_ENTITY;
CREATE TABLE DUMMY_ENTITY (
ID NUMBER GENERATED BY DEFAULT ON NULL AS IDENTITY PRIMARY KEY,
NAME VARCHAR2(100)
);
CREATE TABLE ELEMENT (
ID NUMBER GENERATED BY DEFAULT ON NULL AS IDENTITY PRIMARY KEY,
CONTENT VARCHAR(100),
DUMMY_ENTITY_KEY NUMBER,
DUMMY_ENTITY NUMBER
);
|
SET FOREIGN_KEY_CHECKS=0;
-- ----------------------------
-- Table structure for `t_user`
-- ----------------------------
DROP TABLE IF EXISTS `t_user`;
CREATE TABLE `t_user` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`account` varchar(32) DEFAULT NULL,
`password` varchar(32) DEFAULT NULL,
`name` varchar(32) DEFAULT NULL,
`create_time` datetime DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of t_user
-- ----------------------------
INSERT INTO `t_user` VALUES ('2', 'lance', '<PASSWORD>', 'Lance', '2016-06-02 23:35:38');
INSERT INTO `t_user` VALUES ('1', 'admin', '<PASSWORD>', 'Admin', '2016-06-01 23:35:17');
DROP TABLE IF EXISTS `t_news`;
CREATE TABLE `t_news` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`title` varchar(200) DEFAULT NULL,
`summary` varchar(500) DEFAULT NULL,
`author` varchar(30) DEFAULT NULL,
`content` varchar(5000) DEFAULT NULL,
`status` int(1) DEFAULT NULL,
`create_time` datetime DEFAULT NULL,
`update_time` datetime DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
INSERT INTO `t_news` VALUES ('1', 'Gecco 1.2.0 发布,支持运行时抓取规则配置', 'Gecco的动态编程是新版本的核心功能,Gecco支持动态生成SpiderBean包括类,属性,注解。可以不需要预先定义SpiderBean即可完成抓取', 'Lucky', '<p style=\"color:#000000;font-family:"font-size:14px;font-style:normal;font-weight:normal;text-align:left;text-indent:0px;background-color:#FFFFFF;\">\r\n Gecco 1.2.0 发布了,该版本改进内容包括:\r\n</p>\r\n<p style=\"color:#000000;font-family:"font-size:14px;font-style:normal;font-weight:normal;text-align:left;text-indent:0px;background-color:#FFFFFF;\">\r\n 1.HttpClientDownloader保证inputstream能重复使用,<a href=\"https://github.com/shangjian\">@shangjian</a>提供修改思路\r\n</p>\r\n<p style=\"color:#000000;font-family:"font-size:14px;font-style:normal;font-weight:normal;text-align:left;text-indent:0px;background-color:#FFFFFF;\">\r\n 2.支持一个Before/AfterDownloader对应多个SpiderBean\r\n</p>\r\n<p style=\"color:#000000;font-family:"font-size:14px;font-style:normal;font-weight:normal;text-align:left;text-indent:0px;background-color:#FFFFFF;\">\r\n 3.Gecco的动态编程是新版本的核心功能,Gecco支持动态生成SpiderBean包括类,属性,注解。可以不需要预先定义SpiderBean即可完成抓取。详细情况可以参考<a href=\"http://my.oschina.net/u/2336761/blog/706041\">http://my.oschina.net/u/2336761/blog/706041</a>\r\n</p>\r\n<p style=\"color:#000000;font-family:"font-size:14px;font-style:normal;font-weight:normal;text-align:left;text-indent:0px;background-color:#FFFFFF;\">\r\n <strong>通过动态特性,可以实现如下功能:</strong>\r\n</p>\r\n<ol class=\" list-paddingleft-2\" style=\"color:#000000;font-family:"font-size:14px;font-style:normal;font-weight:normal;text-align:left;text-indent:0px;background-color:#FFFFFF;\">\r\n <li>\r\n <p>\r\n 已经定义了ORM(如:hiberante)的bean,将注解动态的加载到ORM的bean中,可以很方便的将页面格式化后入库\r\n </p>\r\n </li>\r\n <li>\r\n <p>\r\n 很多类似的网站的抓取,SpiderBean都一样,只是提取元素的cssPath不一样,为了不构建很多重复的SpiderBean,可以考虑动态生成SpiderBean\r\n </p>\r\n </li>\r\n <li>\r\n <p>\r\n 通过配置的方式抓取页面,通过后台管理系统、配置文件等配置抓取规则,动态的将配置规则转换成SpiderBean\r\n </p>\r\n </li>\r\n <li>\r\n <p>\r\n 利用动态SpiderBean可以构建可视化爬虫,利用可视化工具构建抓取规则,将规则动态转换为SpiderBean\r\n </p>\r\n </li>\r\n</ol>', '1', 
'2016-07-06 00:05:50', null);
INSERT INTO `t_news` VALUES ('2', '那些值得你试试的Android竞品分析工具', '本文整理了一些自己在开发过程中经常会用到的竞品分析工具,这些工具可以帮助分析竞品', 'Tom', '<h3 style=\"font-family:"font-weight:500;color:#333333;font-size:24px;font-style:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n 前言\r\n</h3>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n 本文整理了一些自己在开发过程中经常会用到的竞品分析工具,这些工具可以帮助分析竞品。让我们得以了解竞品相应的一些技术信息,例如:代码质量、某种业务的实现方式、用了什么第三方库等。除此之外,也有一些高端玩家会玩起 HOOK ,更有甚者是通过修改代码然后进行二次打包。当然这些损害开发者利益的事情,是不值得提倡的。但如果只是出于学习的目的,我是十分建议多折腾的。\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n <strong>提前声明:</strong>\r\n</p>\r\n<ul style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n <li>\r\n 本文只对工具做简要功能介绍,要求面面俱到讲解每个工具使用,本人表示能力有限啊;\r\n </li>\r\n <li>\r\n 下文所介绍的工具,都会附上这些工具的官方地址以及相应的使用教程链接(如果有);\r\n </li>\r\n <li>\r\n 有童鞋对下文提到的工具已经用得出神入化,欢迎写成文章,可以的话,也欢迎给个链接让我补充进本文,顺带学习一下;\r\n </li>\r\n <li>\r\n 本文所有提到的工具只做分析学习使用,请不要拿去做损害他人利益的事情;\r\n </li>\r\n</ul>\r\n<h3 style=\"font-family:"font-weight:500;color:#333333;font-size:24px;font-style:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n Apk 内部结构\r\n</h3>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n 为了方便介绍工具,需要先简单科普一下 Apk 的内部结构,已经很熟悉的童鞋可以忽略此章节。需要注意的是,这里介绍的 Apk 结构并不包含加固的情况,虽然很多厂家推出了加固服务用于对抗反编译,但是加固也有诸多的问题存在,另外基本上分析的大厂应用都没有发现有加固的,可能也是考虑到加固后安装包存在的诸多问题吧。\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n 直接使用 Android Studio 创建一个 HelloWorld 的 Moudle,然后打个 release 的 Apk 安装包,并修改后缀 apk 为 zip 
后进行解压,可以看到下面一个标准的结构:\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n <img src=\"http://img.blog.csdn.net/20160705104032699\" alt=\"\" title=\"\" style=\"border:0px;\" />\r\n</p>\r\n<ul style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n <li>\r\n META-INF: 存放签名文件签名信息的目录,用于系统签名校验;\r\n </li>\r\n <li>\r\n res: 存放资源文件的目录,包含项目中的 xml 和 图片资源等;\r\n </li>\r\n <li>\r\n AndroidManifest.xml: Android项目中的配置文件;\r\n </li>\r\n <li>\r\n classes.dex: 由Java产生的字节码文件打包生成为虚拟机可以解读的字节码文件,所有的源码都在其中;\r\n </li>\r\n <li>\r\n resources.arsc: 资源文件的ID索引表,如:layout、drawable、mipmap都会在R文件生成相应的ID资源;\r\n </li>\r\n <li>\r\n 其他目录:开发者自行添加的目录,如:存放资源的 asserts 、存放依赖包的 lib 目录等。\r\n </li>\r\n</ul>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n 上面介绍完了一个最基本的 Apk 解压后的目录结构,下面直接拿微信作为示例,看看大厂应用的结构是怎样的:\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n <img src=\"http://img.blog.csdn.net/20160705104257793\" alt=\"\" title=\"\" style=\"border:0px;\" />\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n 我们可以看到微信除包含了上面提到的,还有 asserts、lib、r 这三个自行添加的目录,至于前两个目录是干嘛的上面已经提到,r 目录里面主要存放了一些 svg 和 xml 文件,有兴趣可以自行试试。如果要问为什么微信有3个dex文件的话,只能说它超了 Android 系统设定 65k 方法的限制,所以有多个dex包。\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n OK,关于 Apk 的目录结构介绍基本到此,这有助于我们去理解下面即将要介绍的工具!\r\n</p>\r\n<h3 
style=\"font-family:"font-weight:500;color:#333333;font-size:24px;font-style:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n Apktool\r\n</h3>', '1', '2016-07-06 00:08:37', null);
INSERT INTO `t_news` VALUES ('3', 'MongoDB和数据流:实现一个MongoDB Kafka消费者', '仲培艺,关注数据库领域,纠错、寻求报道或者投稿请致邮', 'Tom', '<h2 style=\"font-family:"font-weight:500;color:#333333;font-size:30px;font-style:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n 数据流\r\n</h2>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n 在当前的数据领域,单独一个系统无法支撑所有的请求。想要分析数据,则需要来源多样的海量信息数据。\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n 同时,我们迫不及待地渴求着答案;如果洞悉一切所需的时间超过了数十毫秒,信息就失去了价值——类似于高频交易、欺诈侦测和推荐引擎这一类应用程序,更是经不起这样的等待消耗。这通常要求在流入的数据被存入数据库之前,就对其进行分析。对数据丢失的零容忍和更多挑战的出现,无疑使其更为棘手。\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n Kafka和数据流侧重于从多元fire-hose中获取大量数据并将其分输至需要这些数据的系统——通过筛选、聚合和分析的方法。\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n 这篇博文介绍了Apache Kafka,并举例分析了如何将MongoDB用作流式数据的源(生产者)或目标(消费者)。关于这一主题,<a href=\"https://www.mongodb.com/collateral/data-streaming-with-apache-kafka-and-mongodb\" target=\"_blank\">数据流和Kafka & MongoDB</a>白皮书提供了更为完备的研究。\r\n</p>\r\n<h2 style=\"font-family:"font-weight:500;color:#333333;font-size:30px;font-style:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n Apache Kafka\r\n</h2>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n Kafka提供了一个灵活、可扩展且可靠的方法,用以在一个或多个生产者与消费者之间进行事件数据流交流。事件例子包括:\r\n</p>\r\n<ul style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n <li>\r\n 
<p>\r\n 周期性的传感器读数,如当前温度\r\n </p>\r\n </li>\r\n <li>\r\n <p>\r\n 用户在网上商店向购物车中添加商品\r\n </p>\r\n </li>\r\n <li>\r\n <p>\r\n 发送带有特定标签的推文\r\n </p>\r\n </li>\r\n</ul>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n Kafka事件流被归纳为几个主题。每个生产者选择一个主题来发送指定事件,而消费者则根据所需主题来提取事件。例如,一个财经应用可以根据一个标题来提取关于纽约证券交易所(NYSE)股票交易事件;若为求交易机会,则可根据另一个标题来提取公司财务报告。\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n Kafka中的标题被进一步细分为支持扩展的分区。每一个Kafka节点(代理)负责接收、存储并传递来自指定主题一个或多个分区的事件。按照这个方法,一个主题的处理和存储可以线性扩展覆盖多个代理。也可以通过相似的方法来扩展一个应用——让多个消费者根据一个指定标题来提取时间,每一个事件都来源自独立分区。\r\n</p>', '1', '2016-07-07 12:03:56', null);
INSERT INTO `t_news` VALUES ('4', 'MongoDB推出了新型DaaS解决方案“Altas”,提供数据库托管服务', 'Atlas操作简便,无需构建、配置或管理服务器;无需备份调度', 'Tom', '<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n 日前,<a href=\"https://www.mongodb.com/cloud\" target=\"_blank\">MongoDB 推出了Atlas</a>,一个新型DaaS解决方案,它在云上运行MongoDB非常简便、活力且节约成本。无论运行的是一个单机副本集还是一个负载百兆字节的分片集群,Atlas作为一个服务于MongoDB的数据库,都可帮助其轻松运行。\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n <strong>简便性:</strong>\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n Atlas操作简便,无需构建、配置或管理服务器;无需备份调度;也无需建立监控或查找安全漏洞。若服务器夜间性能下降,则该系统会对其进行维护。而若出现了一些无法自动解决的问题,也由全天候服务的响应团队帮助用户出面解决。面对其它一些需求,如扩大存储、扩展运行或是添加分片,用户只需要轻敲用户界面(UI),剩余事项就可交由Atlas处理。\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n <strong>有活力:</strong>\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n MongoDB Atlas是基于当前云产品组成部分的构建,这些部分长年来负责管理集群。其中,监控部分于四年前首发。同时,Atlas还配备有对MongoDB来说最为先进的备份解决方案。这一方案发布于三年前,支持时间点(point in time)恢复,支持强大的配置选项,而MongoDB多年来对工作负载备份的高要求,也证实了其稳定性。其自两年前推广可用以来,支持Atlas集群,并对Automation进行管理,历经迭代。因此,虽然Atlas自身是一个新产品,但其所有组成部分都已经历经了多年的挑战与测试。\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n <strong>弹性价格:</strong>\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n 
总的来说,云产品的一大亮点就是其弹性的价格,而产品开发者也想其DaaS具有这一优点。使用Atlas的用户只需要为其真正用到的部分买单,而计费账单则按月结算。这样用户只需要花费几美元就可以体验几个小时的集群启动,还能够无需忧虑地以一个预想的价格,最大程度地运行其最为关键的工作负载。\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n <strong>其他观点:</strong>\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n MongoDB Atlas无疑是在云上运行MongoDB的最佳方案。然而,若用户不希望运行全部在云上完成,则将为其提供其他方案,以便在其选定的环境下管理MongoDB。如果用户运行的是一个混合环境,则需查看<a href=\"https://www.mongodb.com/cloud\" target=\"_blank\">MongoDB云管理器</a>;如果完全是On-Prem,则需尝试<a href=\"https://www.mongodb.com/products/ops-manager\" target=\"_blank\">MongoDB Ops管理器</a>。这三个管理产品共享相同的强大组成部分,确保用户不受运行环境的干扰。产品覆盖了所有部署MongoDB的场景。\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n 如今,<a href=\"https://www.mongodb.com/cloud\" target=\"_blank\">MongoDB Atlas</a>已支持AWS生产,而且在不久的将来,还会和战略合作伙伴Microsoft Azure和Google Cloud Platform协同推出。\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n 一直以来MongoDB都致力于促进数据库发展并助力开发者团队进行应用程序开发。现在,Atlas使这一切在云上的进行变得更为顺畅。\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n <strong>关于作者——<NAME></strong>\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n Eliot,MongoDB CTO &联合创始人,也是MongoDB内核开发核心贡献者,曾是ShopWiki联合创始人& CTO。此前,入选了美国商业周刊评选出的2006年度全国25岁以下的企业家前二十五强。Eliot还在美国布朗大学获得了计算机科学理学学士学位。\r\n</p>', '1', '2016-07-07 12:04:36', null);
INSERT INTO `t_news` VALUES ('5', '论物联网四项设计挑战的最佳解决方案', '', 'Lucky', '<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n 无论是智能住宅、联网汽车还是智能工厂,所有智能技术的核心都是设备间的网络互联,而这正是我们耳熟能详的<a href=\"https://en.wikipedia.org/wiki/Internet_of_things\" target=\"_blank\">物联网</a>(IoT)。IoT发展过程中不断改善人们生活和交互的方式。\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n 企业可以收集有效的信息来简化操作流程、预测重大变化、和确保满足客户实时需求。用户可以具有更加智能化的生活方式而不是在繁琐的事物上浪费时间。\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n IoT具有很大的前景,但面向开发者,构建IoT系统时将面临一些独特的挑战。\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n <strong>电池寿命受限</strong><span class=\"Apple-converted-space\"> </span> \r\n</p>\r\n<br />\r\n<img src=\"http://img.blog.csdn.net/20160705164106874\" alt=\"图片描述\" title=\"\" style=\"border:0px;\" /> \r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n 以平板电脑为例,如手机一般大小的电脑。虽然功能齐全,但大屏幕不一定方便,而且屏幕越大的智能硬件需要更大的电池。倘若电脑尺寸更小一些,其所需的电池量也是一样的。\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n 虽然看起来电池寿命问题归属硬件工程师领域,但是一些UX和专业的软件开发方法也可以有效提高设备的电池寿命:\r\n</p>\r\n<ul style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n <li>\r\n <p>\r\n <strong>使用黑色:</strong>在AMOLED屏幕(无须背光模块)中黑色像素促使电池寿命最大化。一般来说,显示屏相对比之下鲜艳的颜色比深色需要消耗更多的电量。\r\n </p>\r\n </li>\r\n <li>\r\n <p>\r\n 
<strong>尽可能使用JPEG:</strong>尽管PNG因其灵活性和支持透明度受到的更多的欢迎,但JPEG仍然为较好的压缩格式。据<a href=\"http://mobisocial.stanford.edu/papers/boneh-www2012.pdf\" target=\"_blank\">斯坦福大学研究</a>表明JPEG标准比PNG更能缩少电量消耗。\r\n </p>\r\n </li>\r\n <li>\r\n <p>\r\n <strong>缩减网络请求:</strong>虽然有些时候需要实时查询数据的连通性,但是需要谨慎地运用这些技术,尤其是对于加密的数据。\r\n </p>\r\n </li>\r\n <li>\r\n <p>\r\n <strong>减少JavaScript:</strong>应用程序中有很多带宽/耗电的因素,其中最大的是Javascript的使用,当浏览器遇到<script><span class=\"Apple-converted-space\"> </span>标签时,脚本代码会优先运行。\r\n </p>\r\n </li>\r\n</ul>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n <strong>数据管理:“抓住一切”并不是最终结果</strong> \r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n 为了真正充分利用IoT系统,需要确保所提供的功能保证一切安全。当涉及到大数据,正如<a href=\"http://www.forbes.com/forbes/welcome/#4555b09a74e4\" target=\"_blank\">Forbes</a>所述,座右铭“如果你不能度量它,你就不能管理它,“在IoT领域真的很实用。\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n 目前是软件开发者常犯得错误是在没有考虑实际目的情况下收集尽可能的数据。回归电池寿命,想要节省电池量,仅需收集数据处理中有限的数据。\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n 在数据安全方面,最终需要在开放的生态系统处理,另外这也是一个新领域仍然在不断地被开发。因此,你需要了解行业趋势。与其他手机开发项目相比,在所需基础上保证用户权限可以有效保证设备安全。\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n 当然,数字威胁并不是唯一值得担心的。如果你管理用户的个人数据,需要确保从<a href=\"https://www.sitepoint.com/protect-yourself-and-your-business-from-social-engineering/\" target=\"_blank\">社会工程攻击</a>中受到保护。\r\n</p>', '1', '2016-07-07 12:05:29', null);
INSERT INTO `t_news` VALUES ('6', '蜕变还是改变?且看Spark 2.0测评', 'park 2.0对TPC-DS的支持度确实比Spark 1.6更好。Spark SQL 1.6版本只能运行大约70个查询,而2.0预览版可以运行99个SQL。在SPARK-12540这个JIRA中,可以看到一些相关的TPC-DS需要的SQL功能点都在陆续的完成中', 'Lucky', '<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n Databricks近期推出了Spark 2.0的预览版,开源社区对这个版本有非常高的期待,即使是预览版也有很多的下载试用。Spark版本号从1.x演进到2.0,可以看出社区对这个版本寄予厚望。在对外发布的<a href=\"https://databricks.com/blog/2016/05/11/apache-spark-2-0-technical-preview-easier-faster-and-smarter.html\" target=\"_blank\">官方blog</a>中,Easier/Faster/Smarter是其宣传的几个主要创新领域。笔者把它通俗的翻译为:SQL支持更全,更强大的性能,适配流式计算和批处理的统一编程模型。\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n 从实用性的角度,这几个功能确实是企业用户特别看重的内容,说明Databricks以及社区开始重视商业需求,并期望Spark向商业产品化的方向转变。那么实际情况到底是怎样呢?和一些商业大数据产品,如星环TDH的比较又如何呢?我们来做一些深度的测评来对比下。\r\n</p>\r\n测试环境的准备\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n 首先看下我们的测试环境情况,考虑到Spark的内存管理机制不够完善,我们使用了128GB的内存配置,尽量减少因为Spark SQL的稳定性问题给整个测试带来的负面影响。测试集群包含4台同构的x86服务器,每台配置如下:\r\n</p>\r\n<blockquote style=\"font-size:14px;color:#333333;font-family:"font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background:#F7F7F7;\">\r\n <p>\r\n CPU:2X6 core, E5 2620 v2, 2.10GHz<span class=\"Apple-converted-space\"> </span><br />\r\n内存:128G<span class=\"Apple-converted-space\"> </span><br />\r\n网络:千兆网卡<span class=\"Apple-converted-space\"> </span><br />\r\n磁盘:3X3T\r\n </p>\r\n</blockquote>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n 然而实际测试中,我们发现Spark 
SQL的稳定性依然是个很大的问题。为了测试出很好的对比数据,我们花费了比预期多5倍的时间。系统仍然有很多不稳定运行的问题,尤其是在连续运行批量的业务方面,我们几乎没有在一次连续测试中跑完所有的SQL,最后不得不从多次运行benchmark的结果中选出每个查询最好的数据作为Spark SQL的最终性能报告数据源。虽然这个测试方法因为Spark 2.0 预览版本质量问题而显的不正规,但是还是能够帮助我们去理解这个版本的一些现实状况。\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n 同时为了对比,我们选用TDH 4.5版本来做功能以及性能验证的样板。基本测试程序也选用Databricks Blog上提到的TPC-DS。\r\n</p>\r\nSQL支持度\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n 首先值得肯定的是,Spark 2.0对TPC-DS的支持度确实比Spark 1.6更好。Spark SQL 1.6版本只能运行大约70个查询,而2.0预览版可以运行99个SQL。在SPARK-12540这个JIRA中,可以看到一些相关的TPC-DS需要的SQL功能点都在陆续的完成中。\r\n</p>\r\n<p style=\"color:#333333;font-family:"font-size:16px;font-style:normal;font-weight:normal;text-align:justify;text-indent:0px;background-color:#FFFFFF;\">\r\n 从相应的JIRA记录中,我们可以看到Spark 2.0在SQL兼容实现上的主要思路。在Catalyst中为不同的SQL功能增加相应的变换器,将其变成有相同语义的已经实现的SQL执行计划。这个实现方式总体上不错,但同时也有很多的局限性,譬如比较难以处理各种corner case,对子查询的支持比较差,只能运行简单场景下的子查询,此外一些SQL可能会触发多种变换器,从而带来一些不是期望中的结果。正如Databricks的开发人员在各自JIRA上对这些限制的一些描述\r\n</p>', '1', '2016-07-07 12:06:10', null);
|
{#- Dispatch wrapper: convert a timestamp `column` between time zones. -#}
{#- `source_tz` defaults to "UTC"; `target_tz` falls back to the -#}
{#- `dbt_date:time_zone` project variable when not passed. Because both -#}
{#- defaults are resolved here, adapter implementations always receive -#}
{#- non-empty values when called through this macro. -#}
{%- macro convert_timezone(column, target_tz=None, source_tz=None) -%}
{%- set source_tz = "UTC" if not source_tz else source_tz -%}
{%- set target_tz = var("dbt_date:time_zone") if not target_tz else target_tz -%}
{{ adapter.dispatch('convert_timezone', 'dbt_date') (column, target_tz, source_tz) }}
{%- endmacro -%}
{#- Default implementation (engines with a native convert_timezone(), -#}
{#- e.g. Snowflake/Redshift). -#}
{#- NOTE(review): when invoked via convert_timezone() above, source_tz is -#}
{#- always defaulted to "UTC", so the two-argument branch below is only -#}
{#- reachable if this macro is dispatched directly with a falsy source_tz. -#}
{% macro default__convert_timezone(column, target_tz, source_tz) -%}
{%- if not source_tz -%}
cast(convert_timezone('{{ target_tz }}', {{ column }}) as {{ dbt_utils.type_timestamp() }})
{%- else -%}
cast(convert_timezone('{{ source_tz }}', '{{ target_tz }}', {{ column }}) as {{ dbt_utils.type_timestamp() }})
{%- endif -%}
{%- endmacro -%}
{#- BigQuery implementation. -#}
{#- NOTE(review): `source_tz` is accepted but ignored — BigQuery's -#}
{#- datetime(ts, tz) interprets the input TIMESTAMP as UTC, so only a -#}
{#- UTC source is supported here. Passing a non-UTC source_tz is -#}
{#- silently wrong on this adapter; confirm with dbt-date docs. -#}
{%- macro bigquery__convert_timezone(column, target_tz, source_tz=None) -%}
timestamp(datetime({{ column }}, '{{ target_tz}}'))
{%- endmacro -%}
{#- Spark implementation: normalize the column from source_tz to UTC -#}
{#- (to_utc_timestamp), then shift from UTC to target_tz -#}
{#- (from_utc_timestamp). -#}
{%- macro spark__convert_timezone(column, target_tz, source_tz) -%}
from_utc_timestamp(
        to_utc_timestamp({{ column }}, '{{ source_tz }}'),
        '{{ target_tz }}'
        )
{%- endmacro -%}
{#- Postgres implementation using AT TIME ZONE. -#}
{#- With source_tz: interpret the column in source_tz, then convert to -#}
{#- target_tz. Without source_tz: a single AT TIME ZONE, whose meaning -#}
{#- depends on whether the column is timestamp or timestamptz. -#}
{#- NOTE(review): the wrapper macro always supplies source_tz ("UTC"), -#}
{#- so the else-branch is only reachable on direct dispatch. -#}
{% macro postgres__convert_timezone(column, target_tz, source_tz) -%}
{%- if source_tz -%}
cast({{ column }} at time zone '{{ source_tz }}' at time zone '{{ target_tz }}' as {{ dbt_utils.type_timestamp() }})
{%- else -%}
cast({{ column }} at time zone '{{ target_tz }}' as {{ dbt_utils.type_timestamp() }})
{%- endif -%}
{%- endmacro -%}
|
<gh_stars>0
-- FUNCTION: public.Trig1_$%{}[]()&*^!@"'`\/#()
-- Trigger-function fixture whose identifier deliberately exercises quoting
-- of special characters (embedded double quotes are escaped by doubling "").
-- Also pins a spread of function options: COST, volatility/leakproof/strict,
-- SECURITY DEFINER, and per-function SET parameters.
-- DROP FUNCTION public."Trig1_$%{}[]()&*^!@""'`\/#"();
CREATE FUNCTION public."Trig1_$%{}[]()&*^!@""'`\/#"()
    RETURNS trigger
    LANGUAGE 'plpgsql'
    COST 123
    IMMUTABLE LEAKPROOF STRICT SECURITY DEFINER
    SET application_name='appname2'
    SET search_path=public, pg_catalog
    SET array_nulls='true'
-- NOTE(review): a bare SELECT without INTO/PERFORM raises "query has no
-- destination for result data" if this function is ever executed;
-- presumably only its definition (not its execution) is under test — confirm.
AS $BODY$begin
select 1;
end;$BODY$;
ALTER FUNCTION public."Trig1_$%{}[]()&*^!@""'`\/#"()
    OWNER TO postgres;
COMMENT ON FUNCTION public."Trig1_$%{}[]()&*^!@""'`\/#"()
    IS 'some comment';
|
<filename>t/ddl/schema3.ddl
-- Test schema: users/groups with a memberships junction table and a
-- read-only "roles" view flattening each user's group names.
CREATE TABLE users (
    id INTEGER PRIMARY KEY,
    login_name TEXT UNIQUE NOT NULL,
    passphrase TEXT NOT NULL,
    name TEXT,
    activated INTEGER           -- boolean flag: 1 = activated
);
CREATE TABLE groups (
    id INTEGER PRIMARY KEY,
    group_name TEXT UNIQUE NOT NULL
);
-- Junction table: which users belong to which groups.
CREATE TABLE memberships (
    id INTEGER PRIMARY KEY,
    user_id INTEGER NOT NULL REFERENCES users (id),
    group_id INTEGER NOT NULL REFERENCES groups (id)
);
-- One row per (user, group); users with no memberships still appear
-- (with a NULL role) thanks to the LEFT JOINs.
CREATE VIEW roles AS
SELECT login_name, group_name AS role
FROM users
LEFT JOIN memberships ON users.id = memberships.user_id
LEFT JOIN groups ON groups.id = memberships.group_id;
-- NOTE: login_name/group_name already carry column-level UNIQUE
-- constraints; these named indexes are kept for anything that refers
-- to them by name.
CREATE UNIQUE INDEX login_name ON users (login_name);
CREATE UNIQUE INDEX group_name ON groups (group_name);
CREATE UNIQUE INDEX user_group ON memberships (user_id, group_id);
CREATE INDEX member_user ON memberships (user_id);
CREATE INDEX member_group ON memberships (group_id);
-- Seed user. Fix: explicit column list so the row keeps inserting
-- correctly if columns are added or reordered.
INSERT INTO users (id, login_name, passphrase, name, activated)
VALUES (1, 'bananarepublic', '{SSHA}5gKaJEMxoJZbevrKz452MN31zzLF04Ps', 'Bananas', 1);
|
<filename>sql1_persediaan/persediaan_rekap_sql/persediaan_barang5_batumandi.sql<gh_stars>1-10
-- Per-SKPD inventory summary view for Batumandi (id_skpd = 32):
-- aggregates total stock (saldo) and total value (jumlah_harga) per item
-- from the level-4 detail view. Rebuilt idempotently.
DROP VIEW IF EXISTS view_persediaan_barang5_batumandi CASCADE;
CREATE VIEW view_persediaan_barang5_batumandi AS
SELECT
    nama_provinsi,
    id_provinsi,
    nama_kabupaten,
    id_kabupaten,
    nama_lokasi_bidang,
    id_lokasi_bidang,
    nama_skpd,
    id_skpd,
    nama_barang,
    kode_barang,
    satuan,
    id_satuan,
    jenis_barang,
    id_jenis_barang,
    sum(saldo) AS total_saldo_barang,
    sum(jumlah_harga) AS total_harga
FROM
    view_persediaan_barang4_batumandi
WHERE
    -- Fix: dropped the redundant "1 = 1 AND" tautology.
    id_skpd = 32
GROUP BY
    nama_provinsi,
    id_provinsi,
    nama_kabupaten,
    id_kabupaten,
    nama_lokasi_bidang,
    id_lokasi_bidang,
    nama_skpd,
    id_skpd,
    nama_barang,
    kode_barang,
    satuan,
    id_satuan,
    jenis_barang,
    id_jenis_barang
;
-- Read-only access for the reporting role: grant everything, then
-- revoke DML so only SELECT effectively remains.
GRANT ALL PRIVILEGES ON view_persediaan_barang5_batumandi TO lap_batumandi;
REVOKE INSERT, UPDATE, DELETE ON view_persediaan_barang5_batumandi FROM lap_batumandi;
|
--============================================================================
-- Target database for the procedure below.
USE [TechMarket];
GO
------------------------------------------------------------------------------
-- Idempotent deploy: drop any existing dbo.spCart_UpdateProduct before
-- re-creating it (CREATE PROCEDURE cannot replace in place).
IF EXISTS(
    SELECT *
    FROM INFORMATION_SCHEMA.ROUTINES
    WHERE [ROUTINE_NAME] = 'spCart_UpdateProduct'
      AND [ROUTINE_TYPE] = 'PROCEDURE'
      AND [ROUTINE_BODY] = 'SQL'
      AND [SPECIFIC_SCHEMA] = 'dbo')
BEGIN
    DROP PROCEDURE dbo.spCart_UpdateProduct;
END
GO
------------------------------------------------------------------------------
-- Standard session options recorded with the module at creation time.
SET ANSI_NULLS ON;
SET QUOTED_IDENTIFIER ON;
SET ANSI_PADDING ON;
GO
--============================================================================
-- dbo.spCart_UpdateProduct
--
-- Sets the quantity of a product line already in a user's cart.
-- Raises an error (severity 11, distinct state per check) and returns -1 on
-- any validation failure; returns 0 on success.
--============================================================================
CREATE PROCEDURE spCart_UpdateProduct
    @UserId INT,
    @ProductId INT,
    @Quantity INT
AS
BEGIN
    SET NOCOUNT ON;
    --=========================================================================
    -- Validation:
    --=========================================================================
    IF @UserId <= 0
    BEGIN
        RAISERROR ('Must pass a valid user id.', 11, 1);
        RETURN -1;
    END
    IF @ProductId <= 0
    BEGIN
        RAISERROR ('Must pass a valid product id.', 11, 2);
        RETURN -1;
    END
    IF @Quantity <= 0
    BEGIN
        RAISERROR ('Must pass a valid quantity', 11, 3);
        RETURN -1;
    END
    IF NOT EXISTS (SELECT 1 FROM dbo.Users WHERE [Id] = @UserId)
    BEGIN
        RAISERROR ('User was not found.', 11, 4);
        RETURN -1;
    END
    IF NOT EXISTS (SELECT 1 FROM dbo.Products WHERE [Id] = @ProductId)
    BEGIN
        RAISERROR ('Product was not found.', 11, 5);
        RETURN -1;
    END
    -- Fix: the original silently "succeeded" when the product was not in
    -- the cart (the UPDATE matched zero rows). Surface that as an error,
    -- consistent with the other validations above.
    IF NOT EXISTS (SELECT 1
                   FROM dbo.CartProducts
                   WHERE [UserId] = @UserId
                     AND [ProductId] = @ProductId)
    BEGIN
        RAISERROR ('Product is not in the user''s cart.', 11, 6);
        RETURN -1;
    END
    --========================================================================
    -- Update:
    --========================================================================
    UPDATE dbo.CartProducts
    SET [Quantity] = @Quantity
    WHERE UserId = @UserId
      AND ProductId = @ProductId;

    RETURN 0;
END
GO
--============================================================================
<reponame>altostratous/forsat<filename>triggers.sql
-- Generic trigger helper: cancel the triggering row operation.
-- Returning NULL from a BEFORE row-level trigger makes PostgreSQL skip
-- the INSERT/UPDATE/DELETE for that row.
CREATE FUNCTION ignore_manipulation() RETURNS trigger AS
$func$
BEGIN
    RETURN NULL;
END;
$func$ LANGUAGE plpgsql;
-- triggers to prevent bad parent folders
-- Invariant: a folder's path must start with its parent's path
-- (child_of_path). Offending INSERTs/UPDATEs are silently dropped by
-- ignore_manipulation() returning NULL.
-- NOTE(review): LIKE treats '%' and '_' inside child_of_path as wildcards;
-- paths containing those characters could pass validation incorrectly —
-- confirm path contents or escape the pattern.
CREATE TRIGGER parent_path_update_validity
  BEFORE UPDATE OF path ON Folder
  FOR EACH ROW
  WHEN (NOT NEW.path LIKE NEW.child_of_path || '%')
  EXECUTE PROCEDURE ignore_manipulation();
CREATE TRIGGER parent_path_insert_validity
  BEFORE INSERT ON Folder
  FOR EACH ROW
  WHEN (NOT NEW.path LIKE NEW.child_of_path || '%')
  EXECUTE PROCEDURE ignore_manipulation();
-- procedure to ignore bad reminder times
-- Discards (RETURN NULL) a reminder row when its time falls after the
-- task's predicted end, after the task's real end, or in the past.
-- NOTE(review): `WHERE id = NEW.id` compares Task.id against the
-- Reminder row's own id — confirm reminders really share the task's id
-- (rather than carrying a task_id FK).
-- NOTE(review): if predicted_time/predicted_duration (or the real_*
-- columns) are NULL, the comparison is UNKNOWN and that check silently
-- passes — verify this is intended.
CREATE FUNCTION ignore_bad_reminder_time() RETURNS trigger AS $ignore_bad_reminder_time$
BEGIN
    IF (SELECT predicted_time + predicted_duration FROM Task WHERE id = NEW.id) < NEW.time THEN
        RETURN NULL;
    END IF;
    IF (SELECT real_time + real_duration FROM Task WHERE id = NEW.id) < NEW.time THEN
        RETURN NULL;
    END IF;
    IF NEW.time < current_timestamp THEN
        RETURN NULL;
    END IF;
    RETURN NEW;
END;
$ignore_bad_reminder_time$ LANGUAGE plpgsql;
-- triggers to prevent invalid reminders
CREATE TRIGGER reminder_time_insert_validity
  BEFORE INSERT ON Reminder
  FOR EACH ROW EXECUTE PROCEDURE ignore_bad_reminder_time();
CREATE TRIGGER reminder_time_update_validity
  BEFORE UPDATE OF time ON Reminder
  FOR EACH ROW EXECUTE PROCEDURE ignore_bad_reminder_time();
-- triggers to redirect delete and update from list to folder
-- CREATE FUNCTION delete_from_list() RETURNS trigger AS $$
-- BEGIN
-- DELETE FROM folder WHERE OLD.email = email AND OLD.path = path;
-- RETURN OLD;
-- END;
-- $$ LANGUAGE plpgsql;
--
-- CREATE FUNCTION update_list() RETURNS trigger AS $$
-- BEGIN
-- UPDATE folder SET path = NEW.path, email = NEW.email WHERE email = OLD.email AND path = OLD.path;
-- RETURN NEW;
-- END;
-- $$ LANGUAGE plpgsql;
--
-- CREATE TRIGGER list_delete_redirection
-- AFTER DELETE ON list
-- FOR EACH ROW EXECUTE PROCEDURE delete_from_list();
--
-- CREATE TRIGGER list_update_redirection
-- BEFORE UPDATE ON list
-- FOR EACH ROW EXECUTE PROCEDURE update_list();
-- Assigning personal tasks automatically
-- For a task created in a folder that is not shared (no sharedfolders row
-- for this path/owner), default the assignee to the inserting user's email.
-- NOTE(review): the trigger name "task_insert_validity" is misleading —
-- this assigns a default value, it does not validate.
CREATE FUNCTION assign_personal_tasks() RETURNS trigger AS $$
BEGIN
    IF NOT EXISTS(SELECT * FROM sharedfolders WHERE path = NEW.path AND owner_email = NEW.email) THEN
        NEW.assigned_user_email = NEW.email;
    END IF;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER task_insert_validity
  BEFORE INSERT ON task
  FOR EACH ROW
  EXECUTE PROCEDURE assign_personal_tasks();
-- Task availability check
-- Allows an assignment change only when the assignee owns or shares the
-- task's folder; otherwise the row change is discarded (RETURN NULL).
-- NOTE(review): NATURAL JOIN silently joins on every column name shared
-- between task and sharedfolders — adding a column to either table can
-- change this query's meaning. Prefer an explicit ON/USING clause once
-- the shared columns are confirmed.
CREATE FUNCTION check_task_availability() RETURNS trigger AS $$
BEGIN
    IF NEW.id IN (
        SELECT id FROM task NATURAL JOIN sharedfolders WHERE
          sharedfolders.owner_email = NEW.assigned_user_email OR
          sharedfolders.user_email = NEW.assigned_user_email
    ) THEN
        RETURN NEW;
    ELSE
        RETURN NULL;
    END IF;
END;
$$ LANGUAGE plpgsql;
-- DROP TRIGGER task_assignment_validity ON task
-- DROP TRIGGER task_assignment_update_validity ON task
-- DROP FUNCTION check_task_availability()
-- CREATE TRIGGER task_assignment_validity
-- BEFORE INSERT ON task
-- FOR EACH ROW
-- EXECUTE PROCEDURE check_task_availability();
CREATE TRIGGER task_assignment_update_validity
  BEFORE UPDATE OF assigned_user_email ON task
  FOR EACH ROW
  EXECUTE PROCEDURE check_task_availability();
-- DROP TRIGGER log_task_insertion ON task
-- DROP FUNCTION log_task_creation()
-- Logging user activities
-- Audit hook: record a folder-activity row every time a task is created.
CREATE FUNCTION log_task_creation() RETURNS trigger AS
$log_task_creation$
BEGIN
    INSERT INTO folderactivities (path, email, time, message)
        VALUES (NEW.path, NEW.email, current_timestamp, 'A task was added.');
    RETURN NEW;
END;
$log_task_creation$ LANGUAGE plpgsql;

CREATE TRIGGER log_task_insertion
    AFTER INSERT ON task
    FOR EACH ROW
    EXECUTE PROCEDURE log_task_creation();
<reponame>Harrityin/reports
-- Student roster table for the reports system.
-- NOTE(review): camelCase "userId" deviates from snake_case convention;
-- kept as-is for compatibility with existing callers.
CREATE TABLE users (
    userId INT NOT NULL,        -- surrogate key; must be positive (see CHECK)
    name NVARCHAR(5),           -- student name (Chinese, up to 5 characters)
    sex NCHAR(1),               -- '男' (male) or '女' (female)
    school NVARCHAR(100),
    grade INT,                  -- school grade, 1..9 (see grade_range)
    class NVARCHAR(10),
    PRIMARY KEY (userId),
    CONSTRAINT user_id_positive
        CHECK (userId > 0),
    CONSTRAINT sex_chinese
        CHECK (sex IN ('男', '女')),
    CONSTRAINT grade_range
        CHECK (grade BETWEEN 1 AND 9)
);