-- Migration: add multiple-GPU support (columns, indexes, views, types, functions, triggers).
-- Support multiple GPU
-- frame_history: add per-frame GPU count plus reserved / max-used GPU memory counters.
ALTER TABLE frame_history ADD COLUMN int_gpus INT DEFAULT 0 NOT NULL;
ALTER TABLE frame_history ADD COLUMN int_gpu_mem_reserved BIGINT DEFAULT 0 NOT NULL;
ALTER TABLE frame_history ADD COLUMN int_gpu_mem_max_used BIGINT DEFAULT 0 NOT NULL;
-- show_service: the old int_gpu_min actually held GPU memory, so rename it,
-- widen it to BIGINT, and add separate GPU-count min/max columns.
ALTER TABLE show_service RENAME COLUMN int_gpu_min TO int_gpu_mem_min;
ALTER TABLE show_service ALTER COLUMN int_gpu_mem_min TYPE BIGINT;
ALTER TABLE show_service ADD COLUMN int_gpus_min INT DEFAULT 0 NOT NULL;
ALTER TABLE show_service ADD COLUMN int_gpus_max INT DEFAULT 0 NOT NULL;
-- Fixed: removed a stray second semicolon after the index rename.
ALTER INDEX i_show_service_int_gpu_min RENAME TO i_show_service_int_gpu_mem_min;
CREATE INDEX i_show_service_int_gpus_min ON show_service (int_gpus_min);
-- host_local: widen memory counters, rename GPU columns to *_mem_*, add GPU-count columns.
-- The verify_host_local trigger is dropped first because it references columns being
-- renamed here; it is recreated later in this migration with the new GPU checks.
DROP TRIGGER verify_host_local ON host_local;
ALTER TABLE host_local ALTER COLUMN int_mem_max TYPE BIGINT;
ALTER TABLE host_local ALTER COLUMN int_mem_idle TYPE BIGINT;
ALTER TABLE host_local RENAME COLUMN int_gpu_idle TO int_gpu_mem_idle;
ALTER TABLE host_local ALTER COLUMN int_gpu_mem_idle TYPE BIGINT;
ALTER TABLE host_local RENAME COLUMN int_gpu_max TO int_gpu_mem_max;
ALTER TABLE host_local ALTER COLUMN int_gpu_mem_max TYPE BIGINT;
ALTER TABLE host_local ADD COLUMN int_gpus_idle INT DEFAULT 0 NOT NULL;
ALTER TABLE host_local ADD COLUMN int_gpus_max INT DEFAULT 0 NOT NULL;
CREATE INDEX i_host_local_int_gpus_idle ON host_local (int_gpus_idle);
CREATE INDEX i_host_local_int_gpus_max ON host_local (int_gpus_max);
-- service: mirror the show_service changes (rename/widen GPU memory, add GPU counts).
ALTER TABLE service RENAME COLUMN int_gpu_min TO int_gpu_mem_min;
ALTER TABLE service ALTER COLUMN int_gpu_mem_min TYPE BIGINT;
ALTER TABLE service ADD COLUMN int_gpus_min INT DEFAULT 0 NOT NULL;
ALTER TABLE service ADD COLUMN int_gpus_max INT DEFAULT 0 NOT NULL;
ALTER INDEX i_service_int_gpu_min RENAME TO i_service_int_gpu_mem_min;
CREATE INDEX i_service_int_gpus_min ON service (int_gpus_min);
-- job_local: current and maximum GPU counts for local (host-pinned) job bookings.
ALTER TABLE job_local ADD COLUMN int_gpus INT DEFAULT 0 NOT NULL;
ALTER TABLE job_local ADD COLUMN int_max_gpus INT DEFAULT 0 NOT NULL;
-- task: minimum GPUs plus a scheduler adjustment value.
ALTER TABLE task ADD COLUMN int_min_gpus INT DEFAULT 0 NOT NULL;
ALTER TABLE task ADD COLUMN int_adjust_gpus INT DEFAULT 0 NOT NULL;
-- point: running and minimum GPU counts per show/department.
ALTER TABLE point ADD COLUMN int_gpus INT DEFAULT 0 NOT NULL;
ALTER TABLE point ADD COLUMN int_min_gpus INT DEFAULT 0 NOT NULL;
-- folder_resource: GPU accounting per folder; int_max_gpus defaults to -1,
-- presumably meaning "no limit" -- confirm against the scheduler code.
ALTER TABLE folder_resource ADD COLUMN int_gpus INT DEFAULT 0 NOT NULL;
ALTER TABLE folder_resource ADD COLUMN int_max_gpus INT DEFAULT -1 NOT NULL;
ALTER TABLE folder_resource ADD COLUMN int_min_gpus INT DEFAULT 0 NOT NULL;
CREATE INDEX i_folder_res_int_max_gpus ON folder_resource (int_max_gpus);
-- layer_history: archived per-layer GPU requirements and accumulated GPU time.
ALTER TABLE layer_history ADD COLUMN int_gpus_min INT DEFAULT 0 NOT NULL;
ALTER TABLE layer_history ADD COLUMN int_gpu_time_success BIGINT DEFAULT 0 NOT NULL;
ALTER TABLE layer_history ADD COLUMN int_gpu_time_fail BIGINT DEFAULT 0 NOT NULL;
ALTER TABLE layer_history ADD COLUMN int_gpu_mem_min BIGINT DEFAULT 0 NOT NULL;
ALTER TABLE layer_history ADD COLUMN int_gpu_mem_max BIGINT DEFAULT 0 NOT NULL;
-- job_history: archived per-job GPU time and peak GPU memory.
ALTER TABLE job_history ADD COLUMN int_gpu_time_success BIGINT DEFAULT 0 NOT NULL;
ALTER TABLE job_history ADD COLUMN int_gpu_time_fail BIGINT DEFAULT 0 NOT NULL;
ALTER TABLE job_history ADD COLUMN int_gpu_mem_max BIGINT DEFAULT 0 NOT NULL;
-- job_usage: live per-job accumulated GPU time.
ALTER TABLE job_usage ADD COLUMN int_gpu_time_success BIGINT DEFAULT 0 NOT NULL;
ALTER TABLE job_usage ADD COLUMN int_gpu_time_fail BIGINT DEFAULT 0 NOT NULL;
-- job_resource: widen RSS/VSS, add GPU booking columns. int_max_gpus defaults
-- to 100, matching the int_default_min_gpus default added on the show table below.
ALTER TABLE job_resource ALTER COLUMN int_max_rss TYPE BIGINT;
ALTER TABLE job_resource ALTER COLUMN int_max_vss TYPE BIGINT;
ALTER TABLE job_resource ADD COLUMN int_gpus INT DEFAULT 0 NOT NULL;
ALTER TABLE job_resource ADD COLUMN int_min_gpus INT DEFAULT 0 NOT NULL;
ALTER TABLE job_resource ADD COLUMN int_max_gpus INT DEFAULT 100 NOT NULL;
ALTER TABLE job_resource ADD COLUMN int_local_gpus INT DEFAULT 0 NOT NULL;
ALTER TABLE job_resource ADD COLUMN int_gpu_mem_max BIGINT DEFAULT 0 NOT NULL;
CREATE INDEX i_job_resource_gpus_min_max ON job_resource (int_min_gpus, int_max_gpus);
CREATE INDEX i_job_resource_gpus ON job_resource (int_gpus);
CREATE INDEX i_job_resource_max_gpus ON job_resource (int_max_gpus);
-- subscription: GPUs currently consumed per show/allocation subscription.
ALTER TABLE subscription ADD COLUMN int_gpus INT DEFAULT 0 NOT NULL;
-- show: default per-job GPU bounds applied to newly launched jobs.
ALTER TABLE show ADD COLUMN int_default_min_gpus INT DEFAULT 100 NOT NULL;
ALTER TABLE show ADD COLUMN int_default_max_gpus INT DEFAULT 100000 NOT NULL;
-- proc: rename/widen reserved GPU memory, add GPU count and memory usage tracking.
ALTER TABLE proc RENAME COLUMN int_gpu_reserved TO int_gpu_mem_reserved;
ALTER TABLE proc ALTER COLUMN int_gpu_mem_reserved TYPE BIGINT;
ALTER TABLE proc ADD COLUMN int_gpus_reserved INT DEFAULT 0 NOT NULL;
ALTER TABLE proc ADD COLUMN int_gpu_mem_used BIGINT DEFAULT 0 NOT NULL;
ALTER TABLE proc ADD COLUMN int_gpu_mem_max_used BIGINT DEFAULT 0 NOT NULL;
ALTER TABLE proc ADD COLUMN int_gpu_mem_pre_reserved BIGINT DEFAULT 0 NOT NULL;
ALTER INDEX i_proc_int_gpu_reserved RENAME TO i_proc_int_gpu_mem_reserved;
-- layer_usage: live per-layer accumulated GPU time.
ALTER TABLE layer_usage ADD COLUMN int_gpu_time_success BIGINT DEFAULT 0 NOT NULL;
ALTER TABLE layer_usage ADD COLUMN int_gpu_time_fail BIGINT DEFAULT 0 NOT NULL;
-- layer_mem: widen RSS/VSS, track peak GPU memory.
ALTER TABLE layer_mem ALTER COLUMN int_max_rss TYPE BIGINT;
ALTER TABLE layer_mem ALTER COLUMN int_max_vss TYPE BIGINT;
ALTER TABLE layer_mem ADD COLUMN int_gpu_mem_max BIGINT DEFAULT 0 NOT NULL;
-- layer_resource: widen RSS/VSS, add GPU count and peak GPU memory.
ALTER TABLE layer_resource ALTER COLUMN int_max_rss TYPE BIGINT;
ALTER TABLE layer_resource ALTER COLUMN int_max_vss TYPE BIGINT;
ALTER TABLE layer_resource ADD COLUMN int_gpus INT DEFAULT 0 NOT NULL;
ALTER TABLE layer_resource ADD COLUMN int_gpu_mem_max BIGINT DEFAULT 0 NOT NULL;
-- layer: rename/widen GPU memory minimum, add GPU-count min/max, and add
-- composite indexes used by the dispatcher's resource matching queries.
-- NOTE(review): int_gpus_min/int_gpus_max are declared BIGINT here while every
-- other table declares GPU counts as INT -- confirm this is intentional.
ALTER TABLE layer RENAME COLUMN int_gpu_min TO int_gpu_mem_min;
ALTER TABLE layer ALTER COLUMN int_gpu_mem_min TYPE BIGINT;
ALTER TABLE layer ADD COLUMN int_gpus_min BIGINT DEFAULT 0 NOT NULL;
ALTER TABLE layer ADD COLUMN int_gpus_max BIGINT DEFAULT 0 NOT NULL;
ALTER INDEX i_layer_int_gpu_min RENAME TO i_layer_int_gpu_mem_min;
CREATE INDEX i_layer_cores_gpus_mem ON layer (int_cores_min, int_gpus_min, int_mem_min, int_gpu_mem_min);
CREATE INDEX i_layer_cores_gpus_mem_thread ON layer (int_cores_min, int_gpus_min, int_mem_min, int_gpu_mem_min, b_threadable);
-- job_mem: widen RSS/VSS, track peak GPU memory.
ALTER TABLE job_mem ALTER COLUMN int_max_rss TYPE BIGINT;
ALTER TABLE job_mem ALTER COLUMN int_max_vss TYPE BIGINT;
ALTER TABLE job_mem ADD COLUMN int_gpu_mem_max BIGINT DEFAULT 0 NOT NULL;
-- job: per-job GPU booking bounds.
ALTER TABLE job ADD COLUMN int_min_gpus INT DEFAULT 0 NOT NULL;
ALTER TABLE job ADD COLUMN int_max_gpus INT DEFAULT 100000 NOT NULL;
-- host_stat: the old int_gpu_total/int_gpu_free held GPU memory; rename and widen.
ALTER TABLE host_stat RENAME COLUMN int_gpu_total TO int_gpu_mem_total;
ALTER TABLE host_stat ALTER COLUMN int_gpu_mem_total TYPE BIGINT;
ALTER TABLE host_stat RENAME COLUMN int_gpu_free TO int_gpu_mem_free;
ALTER TABLE host_stat ALTER COLUMN int_gpu_mem_free TYPE BIGINT;
ALTER INDEX i_host_stat_int_gpu_total RENAME TO i_host_stat_int_gpu_mem_total;
ALTER INDEX i_host_stat_int_gpu_free RENAME TO i_host_stat_int_gpu_mem_free;
-- host: rename/widen GPU memory columns and add GPU counts, with indexes for
-- dispatch queries.
-- NOTE(review): int_gpus/int_gpus_idle are BIGINT here but INT on host_local and
-- most other tables -- confirm this is intentional.
ALTER TABLE host RENAME COLUMN int_gpu TO int_gpu_mem;
ALTER TABLE host ALTER COLUMN int_gpu_mem TYPE BIGINT;
ALTER TABLE host RENAME COLUMN int_gpu_idle TO int_gpu_mem_idle;
ALTER TABLE host ALTER COLUMN int_gpu_mem_idle TYPE BIGINT;
ALTER TABLE host ADD COLUMN int_gpus BIGINT DEFAULT 0 NOT NULL;
ALTER TABLE host ADD COLUMN int_gpus_idle BIGINT DEFAULT 0 NOT NULL;
CREATE INDEX i_host_int_gpu_mem ON host (int_gpu_mem);
CREATE INDEX i_host_int_gpu_mem_idle ON host (int_gpu_mem_idle);
CREATE INDEX i_host_int_gpus ON host (int_gpus);
CREATE INDEX i_host_int_gpus_idle ON host (int_gpus_idle);
-- frame: rename/widen reserved GPU memory, add usage tracking and GPU counts.
ALTER TABLE frame RENAME COLUMN int_gpu_reserved TO int_gpu_mem_reserved;
ALTER TABLE frame ALTER COLUMN int_gpu_mem_reserved TYPE BIGINT;
ALTER TABLE frame ADD COLUMN int_gpu_mem_used BIGINT DEFAULT 0 NOT NULL;
ALTER TABLE frame ADD COLUMN int_gpu_mem_max_used BIGINT DEFAULT 0 NOT NULL;
ALTER TABLE frame ADD COLUMN int_gpus INT DEFAULT 0 NOT NULL;
ALTER TABLE frame ADD COLUMN int_total_past_gpu_time INT DEFAULT 0 NOT NULL;
ALTER INDEX i_frame_int_gpu_reserved RENAME TO i_frame_int_gpu_mem_reserved;
-- folder: GPU limits; -1 presumably means "no limit" -- confirm against scheduler code.
ALTER TABLE folder ADD COLUMN int_job_min_gpus INT DEFAULT -1 NOT NULL;
ALTER TABLE folder ADD COLUMN int_job_max_gpus INT DEFAULT -1 NOT NULL;
ALTER TABLE folder ADD COLUMN int_min_gpus INT DEFAULT 0 NOT NULL;
ALTER TABLE folder ADD COLUMN int_max_gpus INT DEFAULT -1 NOT NULL;
-- Views
-- vs_show_resource: per-show totals of cores and GPUs booked by PENDING jobs.
-- Rebuilt to add int_gpus; the legacy comma join is rewritten as an explicit
-- INNER JOIN (same rows, clearer intent).
DROP VIEW vs_show_resource;
CREATE VIEW vs_show_resource (pk_show, int_cores, int_gpus) AS
SELECT
    job.pk_show,
    SUM(int_cores) AS int_cores,
    SUM(int_gpus) AS int_gpus
FROM
    job
    INNER JOIN job_resource ON job.pk_job = job_resource.pk_job
WHERE
    job.str_state = 'PENDING'
GROUP BY
    job.pk_show;
-- vs_job_resource: per-job booked proc/core/GPU/memory totals. LEFT JOIN keeps
-- jobs with zero procs (COALESCE turns the NULL sums into 0). Rebuilt to add
-- int_gpus from the new int_gpus_reserved proc column.
DROP VIEW vs_job_resource;
CREATE VIEW vs_job_resource (pk_job, int_procs, int_cores, int_gpus, int_mem_reserved) AS
SELECT
job.pk_job,
COUNT(proc.pk_proc) AS int_procs,
COALESCE(SUM(int_cores_reserved),0) AS int_cores,
COALESCE(SUM(int_gpus_reserved),0) AS int_gpus,
COALESCE(SUM(int_mem_reserved),0) AS int_mem_reserved
FROM
job LEFT JOIN proc ON (proc.pk_job = job.pk_job)
GROUP BY
job.pk_job;
-- vs_alloc_usage: per-allocation rollup of core/GPU/host capacity and state.
-- "locked" counts come from correlated subqueries over NIMBY_LOCKED/LOCKED
-- hosts; "available" counts only OPEN hosts whose host_stat row reports UP.
-- Rebuilt to add the GPU columns mirroring the existing core columns.
DROP VIEW vs_alloc_usage;
CREATE VIEW vs_alloc_usage (pk_alloc, int_cores, int_idle_cores, int_running_cores, int_locked_cores, int_available_cores, int_gpus, int_idle_gpus, int_running_gpus, int_locked_gpus, int_available_gpus, int_hosts, int_locked_hosts, int_down_hosts) AS
SELECT
alloc.pk_alloc,
COALESCE(SUM(host.int_cores),0) AS int_cores,
COALESCE(SUM(host.int_cores_idle),0) AS int_idle_cores,
COALESCE(SUM(host.int_cores - host.int_cores_idle),0) as int_running_cores,
COALESCE((SELECT SUM(int_cores) FROM host WHERE host.pk_alloc=alloc.pk_alloc AND (str_lock_state='NIMBY_LOCKED' OR str_lock_state='LOCKED')),0) AS int_locked_cores,
COALESCE((SELECT SUM(int_cores_idle) FROM host h,host_stat hs WHERE h.pk_host = hs.pk_host AND h.pk_alloc=alloc.pk_alloc AND h.str_lock_state='OPEN' AND hs.str_state ='UP'),0) AS int_available_cores,
COALESCE(SUM(host.int_gpus),0) AS int_gpus,
COALESCE(SUM(host.int_gpus_idle),0) AS int_idle_gpus,
COALESCE(SUM(host.int_gpus - host.int_gpus_idle),0) as int_running_gpus,
COALESCE((SELECT SUM(int_gpus) FROM host WHERE host.pk_alloc=alloc.pk_alloc AND (str_lock_state='NIMBY_LOCKED' OR str_lock_state='LOCKED')),0) AS int_locked_gpus,
COALESCE((SELECT SUM(int_gpus_idle) FROM host h,host_stat hs WHERE h.pk_host = hs.pk_host AND h.pk_alloc=alloc.pk_alloc AND h.str_lock_state='OPEN' AND hs.str_state ='UP'),0) AS int_available_gpus,
COUNT(host.pk_host) AS int_hosts,
(SELECT COUNT(*) FROM host WHERE host.pk_alloc=alloc.pk_alloc AND str_lock_state='LOCKED') AS int_locked_hosts,
(SELECT COUNT(*) FROM host h,host_stat hs WHERE h.pk_host = hs.pk_host AND h.pk_alloc=alloc.pk_alloc AND hs.str_state='DOWN') AS int_down_hosts
FROM
alloc LEFT JOIN host ON (alloc.pk_alloc = host.pk_alloc)
GROUP BY
alloc.pk_alloc;
-- vs_folder_counts: per-folder frame-state counts and booked core/GPU totals
-- over PENDING jobs only (the state filter lives in the join condition so
-- folders with no pending jobs still appear with zeroed counts).
-- Rebuilt to add int_gpus from job_resource.
DROP VIEW vs_folder_counts;
CREATE VIEW vs_folder_counts (pk_folder, int_depend_count, int_waiting_count, int_running_count, int_dead_count, int_cores, int_gpus, int_job_count) AS
SELECT
folder.pk_folder,
COALESCE(SUM(int_depend_count),0) AS int_depend_count,
COALESCE(SUM(int_waiting_count),0) AS int_waiting_count,
COALESCE(SUM(int_running_count),0) AS int_running_count,
COALESCE(SUM(int_dead_count),0) AS int_dead_count,
COALESCE(SUM(int_cores),0) AS int_cores,
COALESCE(SUM(int_gpus),0) AS int_gpus,
COALESCE(COUNT(job.pk_job),0) AS int_job_count
FROM
folder
LEFT JOIN
job ON (folder.pk_folder = job.pk_folder AND job.str_state='PENDING')
LEFT JOIN
job_stat ON (job.pk_job = job_stat.pk_job)
LEFT JOIN
job_resource ON (job.pk_job = job_resource.pk_job)
GROUP BY
folder.pk_folder;
-- v_history_frame: frame_history rows within the configured history_period
-- window, decorated with allocation/facility names. Outer joins keep frames
-- whose allocation has been deleted. str_show_name is intentionally null here
-- (callers resolve it via the job). Rebuilt to expose the new GPU columns.
DROP VIEW v_history_frame;
CREATE VIEW v_history_frame (pk_frame_history, pk_frame, pk_layer, pk_job, str_name, str_state,
int_mem_reserved, int_mem_max_used, int_cores, int_gpu_mem_reserved, int_gpu_mem_max_used, int_gpus,
str_host, int_exit_status, str_alloc_name,
b_alloc_billable, str_facility_name, int_ts_started, int_ts_stopped, int_checkpoint_count,
str_show_name, dt_last_modified) AS
SELECT
fh.PK_FRAME_HISTORY,
fh.PK_FRAME,
fh.PK_LAYER,
fh.PK_JOB,
fh.STR_NAME,
fh.STR_STATE,
fh.INT_MEM_RESERVED,
fh.INT_MEM_MAX_USED,
fh.INT_CORES,
fh.INT_GPU_MEM_RESERVED,
fh.INT_GPU_MEM_MAX_USED,
fh.INT_GPUS,
fh.STR_HOST,
fh.INT_EXIT_STATUS,
a.STR_NAME STR_ALLOC_NAME,
a.B_BILLABLE B_ALLOC_BILLABLE,
f.STR_NAME STR_FACILITY_NAME,
fh.INT_TS_STARTED,
fh.INT_TS_STOPPED,
fh.INT_CHECKPOINT_COUNT,
null str_show_name,
fh.dt_last_modified
FROM frame_history fh
JOIN job_history jh
ON fh.pk_job = jh.pk_job
LEFT OUTER JOIN alloc a
ON fh.pk_alloc = a.pk_alloc
LEFT OUTER JOIN facility f
ON a.pk_facility = f.pk_facility
WHERE fh.dt_last_modified >= (SELECT dt_begin FROM history_period)
AND fh.dt_last_modified < (SELECT dt_end FROM history_period);
-- v_history_job: job_history rows modified within the history period, plus any
-- job that has not stopped yet (int_ts_stopped = 0), joined to show/facility/
-- dept for display names. Rebuilt to expose the new GPU time/memory columns;
-- the legacy comma joins are rewritten as explicit INNER JOINs (same rows).
DROP VIEW v_history_job;
CREATE VIEW v_history_job (pk_job, str_name, str_shot, str_user, int_core_time_success, int_core_time_fail, int_gpu_time_success, int_gpu_time_fail, int_frame_count, int_layer_count, int_waiting_count, int_dead_count, int_depend_count, int_eaten_count, int_succeeded_count, int_running_count, int_max_rss, int_gpu_mem_max, b_archived, str_facility_name, str_dept_name, int_ts_started, int_ts_stopped, str_show_name, dt_last_modified) AS
SELECT
    jh.PK_JOB,
    jh.STR_NAME,
    jh.STR_SHOT,
    jh.STR_USER,
    jh.INT_CORE_TIME_SUCCESS,
    jh.INT_CORE_TIME_FAIL,
    jh.INT_GPU_TIME_SUCCESS,
    jh.INT_GPU_TIME_FAIL,
    jh.INT_FRAME_COUNT,
    jh.INT_LAYER_COUNT,
    jh.INT_WAITING_COUNT,
    jh.INT_DEAD_COUNT,
    jh.INT_DEPEND_COUNT,
    jh.INT_EATEN_COUNT,
    jh.INT_SUCCEEDED_COUNT,
    jh.INT_RUNNING_COUNT,
    jh.INT_MAX_RSS,
    jh.INT_GPU_MEM_MAX,
    jh.B_ARCHIVED,
    f.str_name STR_FACILITY_NAME,
    d.str_name str_dept_name,
    jh.INT_TS_STARTED,
    jh.INT_TS_STOPPED,
    s.str_name str_show_name,
    jh.dt_last_modified
FROM job_history jh
    INNER JOIN show s ON jh.pk_show = s.pk_show
    INNER JOIN facility f ON jh.pk_facility = f.pk_facility
    INNER JOIN dept d ON jh.pk_dept = d.pk_dept
WHERE (
    jh.dt_last_modified >= (SELECT dt_begin FROM history_period)
    OR jh.int_ts_stopped = 0
);
-- v_history_layer: layer_history rows whose parent job_history falls inside the
-- half-open history period [dt_begin, dt_end), with the show name attached.
-- Rebuilt to expose the new GPU min/time/memory columns; the legacy comma joins
-- are rewritten as explicit INNER JOINs (same rows).
DROP VIEW v_history_layer;
CREATE VIEW v_history_layer (pk_layer, pk_job, str_name, str_type, int_cores_min,
int_mem_min, int_gpus_min, int_gpu_mem_min, int_core_time_success, int_core_time_fail,
int_gpu_time_success, int_gpu_time_fail, int_frame_count, int_layer_count,
int_waiting_count, int_dead_count, int_depend_count, int_eaten_count, int_succeeded_count,
int_running_count, int_max_rss, int_gpu_mem_max, b_archived, str_services, str_show_name, dt_last_modified) AS
SELECT
    lh.PK_LAYER,
    lh.PK_JOB,
    lh.STR_NAME,
    lh.STR_TYPE,
    lh.INT_CORES_MIN,
    lh.INT_MEM_MIN,
    lh.INT_GPUS_MIN,
    lh.INT_GPU_MEM_MIN,
    lh.INT_CORE_TIME_SUCCESS,
    lh.INT_CORE_TIME_FAIL,
    lh.INT_GPU_TIME_SUCCESS,
    lh.INT_GPU_TIME_FAIL,
    lh.INT_FRAME_COUNT,
    lh.INT_LAYER_COUNT,
    lh.INT_WAITING_COUNT,
    lh.INT_DEAD_COUNT,
    lh.INT_DEPEND_COUNT,
    lh.INT_EATEN_COUNT,
    lh.INT_SUCCEEDED_COUNT,
    lh.INT_RUNNING_COUNT,
    lh.INT_MAX_RSS,
    lh.INT_GPU_MEM_MAX,
    lh.B_ARCHIVED,
    lh.STR_SERVICES,
    s.str_name str_show_name,
    lh.dt_last_modified
FROM layer_history lh
    INNER JOIN job_history jh ON lh.pk_job = jh.pk_job
    INNER JOIN show s ON jh.pk_show = s.pk_show
WHERE jh.dt_last_modified >= (SELECT dt_begin FROM history_period)
    AND jh.dt_last_modified < (SELECT dt_end FROM history_period);
-- Types
-- Extend the composite types used by the archival triggers below so the
-- SELECT ... INTO statements can carry the new GPU stats.
ALTER TYPE JobStatType ADD ATTRIBUTE int_gpu_time_success BIGINT;
ALTER TYPE JobStatType ADD ATTRIBUTE int_gpu_time_fail BIGINT;
ALTER TYPE JobStatType ADD ATTRIBUTE int_gpu_mem_max BIGINT;
ALTER TYPE LayerStatType ADD ATTRIBUTE int_gpu_time_success BIGINT;
ALTER TYPE LayerStatType ADD ATTRIBUTE int_gpu_time_fail BIGINT;
ALTER TYPE LayerStatType ADD ATTRIBUTE int_gpu_mem_max BIGINT;
-- Functions
-- recalculate_subs(): rebuild subscription core/GPU usage counters from the
-- procs currently reserved on each allocation's hosts.
CREATE OR REPLACE FUNCTION recalculate_subs()
RETURNS VOID AS $body$
DECLARE
r RECORD;
BEGIN
--
-- Zero all counters, then re-aggregate reserved cores/GPUs per (show, alloc).
-- (The original comment here described an unrelated host-tag function and was
-- a copy-paste leftover.)
--
UPDATE subscription SET int_cores = 0;
UPDATE subscription SET int_gpus = 0;
FOR r IN
SELECT proc.pk_show, alloc.pk_alloc, sum(proc.int_cores_reserved) as c, sum(proc.int_gpus_reserved) as d
FROM proc, host, alloc
WHERE proc.pk_host = host.pk_host AND host.pk_alloc = alloc.pk_alloc
GROUP BY proc.pk_show, alloc.pk_alloc
LOOP
UPDATE subscription SET int_cores = r.c, int_gpus = r.d WHERE pk_alloc=r.pk_alloc AND pk_show=r.pk_show;
END LOOP;
END;
$body$
LANGUAGE PLPGSQL;
-- tmp_populate_folder(): one-off backfill of folder_resource core/GPU totals
-- from the current job_resource rows.
CREATE OR REPLACE FUNCTION tmp_populate_folder()
RETURNS VOID AS $body$
DECLARE
    t RECORD;
BEGIN
    FOR t IN
        SELECT pk_folder, pk_show, sum(int_cores) AS c, sum(int_gpus) AS d
        FROM job
        INNER JOIN job_resource ON job.pk_job = job_resource.pk_job
        GROUP BY pk_folder, pk_show
    LOOP
        UPDATE folder_resource SET int_cores = t.c, int_gpus = t.d WHERE pk_folder = t.pk_folder;
        -- Fixed: the original issued COMMIT here. COMMIT is not allowed inside a
        -- PL/pgSQL FUNCTION (only procedures may control transactions), so the
        -- function would fail with "invalid transaction termination" when called.
    END LOOP;
END;
$body$
LANGUAGE PLPGSQL;
-- tmp_populate_point(): one-off backfill of point (show/department) core/GPU
-- totals from the current job_resource rows.
CREATE OR REPLACE FUNCTION tmp_populate_point()
RETURNS VOID AS $body$
DECLARE
t RECORD;
BEGIN
FOR t IN
SELECT pk_dept, pk_show, sum(int_cores) AS c, sum(int_gpus) AS d
FROM job, job_resource
WHERE job.pk_job = job_resource.pk_job
GROUP BY pk_dept, pk_show
LOOP
UPDATE point SET int_cores = t.c , int_gpus = t.d WHERE pk_show = t.pk_show AND pk_dept = t.pk_dept;
END LOOP;
END;
$body$
LANGUAGE PLPGSQL;
-- tmp_populate_sub(): one-off backfill of subscription core/GPU totals from the
-- procs currently reserved on each allocation's hosts.
CREATE OR REPLACE FUNCTION tmp_populate_sub()
RETURNS VOID AS $body$
DECLARE
t RECORD;
BEGIN
FOR t IN
SELECT proc.pk_show, host.pk_alloc, sum(int_cores_reserved) AS c, sum(int_gpus_reserved) AS d
FROM proc, host
WHERE proc.pk_host = host.pk_host
GROUP BY proc.pk_show, host.pk_alloc
LOOP
UPDATE subscription SET int_cores = t.c, int_gpus = t.d WHERE pk_show = t.pk_show AND pk_alloc = t.pk_alloc;
END LOOP;
END;
$body$
LANGUAGE PLPGSQL;
-- Trigger fn: when a job moves to a different folder, shift its currently
-- booked core/GPU counts from the old folder_resource row to the new one.
-- Returns NULL (AFTER trigger; result is ignored).
CREATE OR REPLACE FUNCTION trigger__after_job_moved()
RETURNS TRIGGER AS $body$
DECLARE
int_core_count INT;
int_gpu_count INT;
BEGIN
SELECT int_cores, int_gpus INTO int_core_count, int_gpu_count
FROM job_resource WHERE pk_job = NEW.pk_job;
IF int_core_count > 0 THEN
UPDATE folder_resource SET int_cores = int_cores + int_core_count
WHERE pk_folder = NEW.pk_folder;
UPDATE folder_resource SET int_cores = int_cores - int_core_count
WHERE pk_folder = OLD.pk_folder;
END IF;
IF int_gpu_count > 0 THEN
UPDATE folder_resource SET int_gpus = int_gpus + int_gpu_count
WHERE pk_folder = NEW.pk_folder;
UPDATE folder_resource SET int_gpus = int_gpus - int_gpu_count
WHERE pk_folder = OLD.pk_folder;
END IF;
RETURN NULL;
END
$body$
LANGUAGE PLPGSQL;
-- Trigger fn: before a job row is deleted, archive its final stats into
-- job_history (b_archived = true) and delete all dependent child rows.
CREATE OR REPLACE FUNCTION trigger__before_delete_job()
RETURNS TRIGGER AS $body$
DECLARE
js JobStatType;
BEGIN
-- Gather the job's final usage/stat/memory counters into one composite value.
-- Field order must match the JobStatType attribute order (including the GPU
-- attributes appended by this migration).
SELECT
job_usage.int_core_time_success,
job_usage.int_core_time_fail,
job_usage.int_gpu_time_success,
job_usage.int_gpu_time_fail,
job_stat.int_waiting_count,
job_stat.int_dead_count,
job_stat.int_depend_count,
job_stat.int_eaten_count,
job_stat.int_succeeded_count,
job_stat.int_running_count,
job_mem.int_max_rss,
job_mem.int_gpu_mem_max
INTO
js
FROM
job_mem,
job_usage,
job_stat
WHERE
job_usage.pk_job = job_mem.pk_job
AND
job_stat.pk_job = job_mem.pk_job
AND
job_mem.pk_job = OLD.pk_job;
-- Snapshot everything into job_history; fall back to "now" if the job never
-- recorded a stop timestamp.
UPDATE
job_history
SET
pk_dept = OLD.pk_dept,
int_core_time_success = js.int_core_time_success,
int_core_time_fail = js.int_core_time_fail,
int_gpu_time_success = js.int_gpu_time_success,
int_gpu_time_fail = js.int_gpu_time_fail,
int_frame_count = OLD.int_frame_count,
int_layer_count = OLD.int_layer_count,
int_waiting_count = js.int_waiting_count,
int_dead_count = js.int_dead_count,
int_depend_count = js.int_depend_count,
int_eaten_count = js.int_eaten_count,
int_succeeded_count = js.int_succeeded_count,
int_running_count = js.int_running_count,
int_max_rss = js.int_max_rss,
int_gpu_mem_max = js.int_gpu_mem_max,
b_archived = true,
int_ts_stopped = COALESCE(epoch(OLD.ts_stopped), epoch(current_timestamp))
WHERE
pk_job = OLD.pk_job;
-- Manual cascade delete of all child rows (dependencies in both directions).
DELETE FROM depend WHERE pk_job_depend_on=OLD.pk_job OR pk_job_depend_er=OLD.pk_job;
DELETE FROM frame WHERE pk_job=OLD.pk_job;
DELETE FROM layer WHERE pk_job=OLD.pk_job;
DELETE FROM job_env WHERE pk_job=OLD.pk_job;
DELETE FROM job_stat WHERE pk_job=OLD.pk_job;
DELETE FROM job_resource WHERE pk_job=OLD.pk_job;
DELETE FROM job_usage WHERE pk_job=OLD.pk_job;
DELETE FROM job_mem WHERE pk_job=OLD.pk_job;
DELETE FROM comments WHERE pk_job=OLD.pk_job;
RETURN OLD;
END
$body$
LANGUAGE PLPGSQL;
-- Trigger fn: when a job finishes, copy its final stats (and each layer's
-- stats) into the *_history tables and drop any local bookings. Unlike
-- trigger__before_delete_job, this does not set b_archived and does not delete
-- the live rows.
CREATE OR REPLACE FUNCTION trigger__after_job_finished()
RETURNS TRIGGER AS $body$
DECLARE
ts INT := cast(epoch(current_timestamp) as integer);
js JobStatType;
ls LayerStatType;
one_layer RECORD;
BEGIN
-- Field order must match JobStatType's attribute order.
SELECT
job_usage.int_core_time_success,
job_usage.int_core_time_fail,
job_usage.int_gpu_time_success,
job_usage.int_gpu_time_fail,
job_stat.int_waiting_count,
job_stat.int_dead_count,
job_stat.int_depend_count,
job_stat.int_eaten_count,
job_stat.int_succeeded_count,
job_stat.int_running_count,
job_mem.int_max_rss,
job_mem.int_gpu_mem_max
INTO
js
FROM
job_mem,
job_usage,
job_stat
WHERE
job_usage.pk_job = job_mem.pk_job
AND
job_stat.pk_job = job_mem.pk_job
AND
job_mem.pk_job = NEW.pk_job;
UPDATE
job_history
SET
pk_dept = NEW.pk_dept,
int_core_time_success = js.int_core_time_success,
int_core_time_fail = js.int_core_time_fail,
int_gpu_time_success = js.int_gpu_time_success,
int_gpu_time_fail = js.int_gpu_time_fail,
int_frame_count = NEW.int_frame_count,
int_layer_count = NEW.int_layer_count,
int_waiting_count = js.int_waiting_count,
int_dead_count = js.int_dead_count,
int_depend_count = js.int_depend_count,
int_eaten_count = js.int_eaten_count,
int_succeeded_count = js.int_succeeded_count,
int_running_count = js.int_running_count,
int_max_rss = js.int_max_rss,
int_gpu_mem_max = js.int_gpu_mem_max,
int_ts_stopped = ts
WHERE
pk_job = NEW.pk_job;
-- Repeat the same snapshot for every layer of the job.
FOR one_layer IN (SELECT pk_layer from layer where pk_job = NEW.pk_job)
LOOP
-- Field order must match LayerStatType's attribute order.
SELECT
layer_usage.int_core_time_success,
layer_usage.int_core_time_fail,
layer_usage.int_gpu_time_success,
layer_usage.int_gpu_time_fail,
layer_stat.int_total_count,
layer_stat.int_waiting_count,
layer_stat.int_dead_count,
layer_stat.int_depend_count,
layer_stat.int_eaten_count,
layer_stat.int_succeeded_count,
layer_stat.int_running_count,
layer_mem.int_max_rss,
layer_mem.int_gpu_mem_max
INTO
ls
FROM
layer_mem,
layer_usage,
layer_stat
WHERE
layer_usage.pk_layer = layer_mem.pk_layer
AND
layer_stat.pk_layer = layer_mem.pk_layer
AND
layer_mem.pk_layer = one_layer.pk_layer;
UPDATE
layer_history
SET
int_core_time_success = ls.int_core_time_success,
int_core_time_fail = ls.int_core_time_fail,
int_gpu_time_success = ls.int_gpu_time_success,
int_gpu_time_fail = ls.int_gpu_time_fail,
int_frame_count = ls.int_total_count,
int_waiting_count = ls.int_waiting_count,
int_dead_count = ls.int_dead_count,
int_depend_count = ls.int_depend_count,
int_eaten_count = ls.int_eaten_count,
int_succeeded_count = ls.int_succeeded_count,
int_running_count = ls.int_running_count,
int_max_rss = ls.int_max_rss,
int_gpu_mem_max = ls.int_gpu_mem_max
WHERE
pk_layer = one_layer.pk_layer;
END LOOP;
/**
* Delete any local core assignments from this job.
**/
DELETE FROM job_local WHERE pk_job=NEW.pk_job;
RETURN NEW;
END;
$body$
LANGUAGE PLPGSQL;
-- Trigger fn: when a job changes department, shift its running core/GPU counts
-- between the old and new point (show/department) rows.
CREATE OR REPLACE FUNCTION trigger__after_job_dept_update()
RETURNS TRIGGER AS $body$
DECLARE
int_running_cores INT;
int_running_gpus INT;
BEGIN
/**
* Handles the accounting for moving a job between departments.
**/
SELECT int_cores, int_gpus INTO int_running_cores, int_running_gpus
FROM job_resource WHERE pk_job = NEW.pk_job;
IF int_running_cores > 0 THEN
UPDATE point SET int_cores = int_cores + int_running_cores
WHERE pk_dept = NEW.pk_dept AND pk_show = NEW.pk_show;
UPDATE point SET int_cores = int_cores - int_running_cores
WHERE pk_dept = OLD.pk_dept AND pk_show = OLD.pk_show;
END IF;
IF int_running_gpus > 0 THEN
UPDATE point SET int_gpus = int_gpus + int_running_gpus
WHERE pk_dept = NEW.pk_dept AND pk_show = NEW.pk_show;
UPDATE point SET int_gpus = int_gpus - int_running_gpus
WHERE pk_dept = OLD.pk_dept AND pk_show = OLD.pk_show;
END IF;
RETURN NULL;
END;
$body$
LANGUAGE PLPGSQL;
-- Trigger fn: reject any host_local update that would drive an idle
-- core/memory/GPU counter negative (i.e. over-booking the local host).
CREATE OR REPLACE FUNCTION trigger__verify_host_local()
RETURNS TRIGGER AS $body$
BEGIN
/**
* Reject updates that would make any idle resource counter negative.
* (The previous comment here described the job max-cores check and was a
* copy-paste leftover from trigger__verify_job_resources.)
**/
IF NEW.int_cores_idle < 0 THEN
RAISE EXCEPTION 'host local doesnt have enough idle cores.';
END IF;
IF NEW.int_mem_idle < 0 THEN
RAISE EXCEPTION 'host local doesnt have enough idle memory';
END IF;
IF NEW.int_gpus_idle < 0 THEN
RAISE EXCEPTION 'host local doesnt have enough GPU idle cores.';
END IF;
IF NEW.int_gpu_mem_idle < 0 THEN
RAISE EXCEPTION 'host local doesnt have enough GPU idle memory.';
END IF;
RETURN NEW;
END;
$body$
LANGUAGE PLPGSQL;
-- Recreate the trigger dropped at the top of this migration. The WHEN clause
-- only fires the check when idle counters change while the max values stay
-- fixed, so lowering a max does not spuriously raise.
CREATE TRIGGER verify_host_local BEFORE UPDATE ON host_local
FOR EACH ROW
WHEN ((NEW.int_cores_max = OLD.int_cores_max AND NEW.int_mem_max = OLD.int_mem_max) AND
(NEW.int_cores_idle != OLD.int_cores_idle OR NEW.int_mem_idle != OLD.int_mem_idle) AND
(NEW.int_gpus_max = OLD.int_gpus_max AND NEW.int_gpu_mem_max = OLD.int_gpu_mem_max) AND
(NEW.int_gpus_idle != OLD.int_gpus_idle OR NEW.int_gpu_mem_idle != OLD.int_gpu_mem_idle))
EXECUTE PROCEDURE trigger__verify_host_local();
-- Trigger fn: after a layer is inserted, create its companion stat/resource/
-- usage/mem rows (reusing pk_layer as each companion's primary key) and seed
-- its layer_history row, now including the GPU minimums.
CREATE OR REPLACE FUNCTION trigger__after_insert_layer()
RETURNS TRIGGER AS $body$
BEGIN
INSERT INTO layer_stat (pk_layer_stat, pk_layer, pk_job) VALUES (NEW.pk_layer, NEW.pk_layer, NEW.pk_job);
INSERT INTO layer_resource (pk_layer_resource, pk_layer, pk_job) VALUES (NEW.pk_layer, NEW.pk_layer, NEW.pk_job);
INSERT INTO layer_usage (pk_layer_usage, pk_layer, pk_job) VALUES (NEW.pk_layer, NEW.pk_layer, NEW.pk_job);
INSERT INTO layer_mem (pk_layer_mem, pk_layer, pk_job) VALUES (NEW.pk_layer, NEW.pk_layer, NEW.pk_job);
INSERT INTO layer_history
(pk_layer, pk_job, str_name, str_type, int_cores_min, int_mem_min, int_gpus_min, int_gpu_mem_min, b_archived,str_services)
VALUES
(NEW.pk_layer, NEW.pk_job, NEW.str_name, NEW.str_type, NEW.int_cores_min, NEW.int_mem_min, NEW.int_gpus_min, NEW.int_gpu_mem_min, false, NEW.str_services);
RETURN NEW;
END;
$body$
LANGUAGE PLPGSQL;
-- Trigger fn: before a layer is deleted, archive its final stats into
-- layer_history (b_archived = true) and delete its companion rows.
CREATE OR REPLACE FUNCTION trigger__before_delete_layer()
RETURNS TRIGGER AS $body$
DECLARE
-- NOTE(review): "js" actually holds a LayerStatType (naming carried over from
-- the job-level trigger).
js LayerStatType;
BEGIN
-- Field order must match LayerStatType's attribute order.
SELECT
layer_usage.int_core_time_success,
layer_usage.int_core_time_fail,
layer_usage.int_gpu_time_success,
layer_usage.int_gpu_time_fail,
layer_stat.int_total_count,
layer_stat.int_waiting_count,
layer_stat.int_dead_count,
layer_stat.int_depend_count,
layer_stat.int_eaten_count,
layer_stat.int_succeeded_count,
layer_stat.int_running_count,
layer_mem.int_max_rss,
layer_mem.int_gpu_mem_max
INTO
js
FROM
layer_mem,
layer_usage,
layer_stat
WHERE
layer_usage.pk_layer = layer_mem.pk_layer
AND
layer_stat.pk_layer = layer_mem.pk_layer
AND
layer_mem.pk_layer = OLD.pk_layer;
UPDATE
layer_history
SET
int_core_time_success = js.int_core_time_success,
int_core_time_fail = js.int_core_time_fail,
int_gpu_time_success = js.int_gpu_time_success,
int_gpu_time_fail = js.int_gpu_time_fail,
int_frame_count = js.int_total_count,
int_waiting_count = js.int_waiting_count,
int_dead_count = js.int_dead_count,
int_depend_count = js.int_depend_count,
int_eaten_count = js.int_eaten_count,
int_succeeded_count = js.int_succeeded_count,
int_running_count = js.int_running_count,
int_max_rss = js.int_max_rss,
int_gpu_mem_max = js.int_gpu_mem_max,
b_archived = true
WHERE
pk_layer = OLD.pk_layer;
-- Manual cascade delete of the layer's companion rows.
DELETE FROM layer_resource where pk_layer=OLD.pk_layer;
DELETE FROM layer_stat where pk_layer=OLD.pk_layer;
DELETE FROM layer_usage where pk_layer=OLD.pk_layer;
DELETE FROM layer_env where pk_layer=OLD.pk_layer;
DELETE FROM layer_mem where pk_layer=OLD.pk_layer;
DELETE FROM layer_output where pk_layer=OLD.pk_layer;
RETURN OLD;
END;
$body$
LANGUAGE PLPGSQL;
-- Trigger fn: reject any host update that would drive an idle core/memory/GPU
-- counter negative (over-allocation guard).
CREATE OR REPLACE FUNCTION trigger__verify_host_resources()
RETURNS TRIGGER AS $body$
BEGIN
IF NEW.int_cores_idle < 0 THEN
RAISE EXCEPTION 'unable to allocate additional core units';
END IF;
If NEW.int_mem_idle < 0 THEN
RAISE EXCEPTION 'unable to allocate additional memory';
END IF;
If NEW.int_gpus_idle < 0 THEN
RAISE EXCEPTION 'unable to allocate additional GPU units';
END IF;
If NEW.int_gpu_mem_idle < 0 THEN
RAISE EXCEPTION 'unable to allocate additional GPU memory';
END IF;
RETURN NEW;
END;
$body$
LANGUAGE PLPGSQL;
-- Recreate the trigger so the WHEN clause also watches the new GPU columns;
-- the checks only run when an idle counter actually changes.
DROP TRIGGER verify_host_resources ON host;
CREATE TRIGGER verify_host_resources BEFORE UPDATE ON host
FOR EACH ROW
WHEN (NEW.int_cores_idle != OLD.int_cores_idle
OR NEW.int_mem_idle != OLD.int_mem_idle
OR NEW.int_gpus_idle != OLD.int_gpus_idle
OR NEW.int_gpu_mem_idle != OLD.int_gpu_mem_idle)
EXECUTE PROCEDURE trigger__verify_host_resources();
-- Trigger fn: reject updates that push a job's booked cores/GPUs over its max.
CREATE OR REPLACE FUNCTION trigger__verify_job_resources()
RETURNS TRIGGER AS $body$
BEGIN
/**
* Check to see if the new cores exceeds max cores. This check is only
* done if NEW.int_max_cores is equal to OLD.int_max_cores and
* NEW.int_cores > OLD.int_cores, otherwise this error will be thrown
* at the wrong time.
**/
IF NEW.int_cores > NEW.int_max_cores THEN
RAISE EXCEPTION 'job has exceeded max cores';
END IF;
IF NEW.int_gpus > NEW.int_max_gpus THEN
RAISE EXCEPTION 'job has exceeded max GPU units';
END IF;
RETURN NEW;
END;
$body$
LANGUAGE PLPGSQL;
-- Recreate the trigger with the GPU condition. Note AND binds tighter than OR,
-- so this fires when either resource rises while its own max is unchanged.
DROP TRIGGER verify_job_resources ON job_resource;
CREATE TRIGGER verify_job_resources BEFORE UPDATE ON job_resource
FOR EACH ROW
WHEN (NEW.int_max_cores = OLD.int_max_cores AND NEW.int_cores > OLD.int_cores OR
NEW.int_max_gpus = OLD.int_max_gpus AND NEW.int_gpus > OLD.int_gpus)
EXECUTE PROCEDURE trigger__verify_job_resources();
-- Trigger fn: when a proc moves between layers, subtract its reserved
-- cores/GPUs from the old layer's layer_resource row and add them to the new
-- one. The loop iterates over the (at most two) affected layers; the DESC
-- ordering presumably keeps a consistent update order across concurrent
-- transactions -- confirm the locking rationale.
CREATE OR REPLACE FUNCTION trigger__update_proc_update_layer()
RETURNS TRIGGER AS $body$
DECLARE
lr RECORD;
BEGIN
FOR lr IN (
SELECT
pk_layer
FROM
layer_stat
WHERE
pk_layer IN (OLD.pk_layer, NEW.pk_layer)
ORDER BY layer_stat.pk_layer DESC
) LOOP
IF lr.pk_layer = OLD.pk_layer THEN
UPDATE layer_resource SET
int_cores = int_cores - OLD.int_cores_reserved,
int_gpus = int_gpus - OLD.int_gpus_reserved
WHERE
pk_layer = OLD.pk_layer;
ELSE
UPDATE layer_resource SET
int_cores = int_cores + NEW.int_cores_reserved,
int_gpus = int_gpus + NEW.int_gpus_reserved
WHERE
pk_layer = NEW.pk_layer;
END IF;
END LOOP;
RETURN NULL;
END;
$body$
LANGUAGE PLPGSQL;
-- Trigger fn: maintain frame_history as frames change state.
--  * Leaving RUNNING: either delete the open history row (exit status 299,
--    presumably a "retry/ignore" sentinel -- confirm against dispatcher code)
--    or close it with final memory usage, stop time and exit status.
--  * Entering RUNNING: open a new history row with the booked resources,
--    including the new GPU count/memory columns.
-- Fixed: removed dataset residue ("| the_stack") that was fused onto the final
-- line after LANGUAGE PLPGSQL; and broke parsing.
CREATE OR REPLACE FUNCTION trigger__frame_history_open()
RETURNS TRIGGER AS $body$
DECLARE
    str_pk_alloc VARCHAR(36) := null;
    int_checkpoint INT := 0;
BEGIN
    IF OLD.str_state = 'RUNNING' THEN
        IF NEW.int_exit_status = 299 THEN
            EXECUTE 'DELETE FROM frame_history WHERE int_ts_stopped = 0 AND pk_frame=$1' USING
                NEW.pk_frame;
        ELSE
            If NEW.str_state = 'CHECKPOINT' THEN
                int_checkpoint := 1;
            END IF;
            EXECUTE
                'UPDATE
                    frame_history
                SET
                    int_mem_max_used=$1,
                    int_gpu_mem_max_used=$2,
                    int_ts_stopped=$3,
                    int_exit_status=$4,
                    int_checkpoint_count=$5
                WHERE
                    int_ts_stopped = 0 AND pk_frame=$6'
            USING
                NEW.int_mem_max_used,
                NEW.int_gpu_mem_max_used,
                epoch(current_timestamp),
                NEW.int_exit_status,
                int_checkpoint,
                NEW.pk_frame;
        END IF;
    END IF;
    IF NEW.str_state = 'RUNNING' THEN
        -- Resolve the allocation from the booked host's name.
        SELECT pk_alloc INTO str_pk_alloc FROM host WHERE str_name=NEW.str_host;
        EXECUTE
            'INSERT INTO
                frame_history
            (
                pk_frame,
                pk_layer,
                pk_job,
                str_name,
                str_state,
                int_cores,
                int_mem_reserved,
                int_gpus,
                int_gpu_mem_reserved,
                str_host,
                int_ts_started,
                pk_alloc
            )
            VALUES
                ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12)'
        USING NEW.pk_frame,
            NEW.pk_layer,
            NEW.pk_job,
            NEW.str_name,
            'RUNNING',
            NEW.int_cores,
            NEW.int_mem_reserved,
            NEW.int_gpus,
            NEW.int_gpu_mem_reserved,
            NEW.str_host,
            epoch(current_timestamp),
            str_pk_alloc;
    END IF;
    RETURN NULL;
END;
$body$
LANGUAGE PLPGSQL;
-- Initial platform schema: one pair of schemas per service (internal + *_api),
-- shared extensions, and a case-insensitive validated email domain.
CREATE SCHEMA analytics_service;
CREATE SCHEMA analytics_service_api;
CREATE SCHEMA community_service;
CREATE SCHEMA community_service_api;
CREATE SCHEMA core;
CREATE SCHEMA core_validator;
CREATE SCHEMA payment_service;
CREATE SCHEMA payment_service_api;
CREATE SCHEMA platform_service;
CREATE SCHEMA platform_service_api;
CREATE SCHEMA project_service;
CREATE SCHEMA project_service_api;
-- citext gives case-insensitive comparison for the email domain below.
CREATE EXTENSION IF NOT EXISTS citext WITH SCHEMA public;
-- email: citext constrained to an HTML5-style email address pattern.
CREATE DOMAIN email AS citext
CONSTRAINT email_check CHECK ((VALUE ~ '^[a-zA-Z0-9.!#$%&''*+/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$'::citext));
CREATE EXTENSION IF NOT EXISTS pgcrypto WITH SCHEMA public;
CREATE EXTENSION IF NOT EXISTS unaccent WITH SCHEMA public;
-- uuid-ossp provides uuid_generate_v4() used for all primary-key defaults.
CREATE EXTENSION IF NOT EXISTS "uuid-ossp" WITH SCHEMA public;
-- single-column composite returned by core.gen_jwt_token
CREATE TYPE core.jwt_token AS (
token text
);
-- project funding modes: all-or-nothing, flexible, subscription
CREATE TYPE project_service.project_mode AS ENUM (
'aon',
'flex',
'sub'
);
CREATE TYPE project_service.shipping_options_enum AS ENUM (
'free',
'national',
'international',
'presential'
);
-- lifecycle of a single charge (see payment_service.payment_status_transitions)
CREATE TYPE payment_service.payment_status AS ENUM (
'pending',
'paid',
'refused',
'refunded',
'chargedback',
'deleted',
'error'
);
-- lifecycle of a recurring subscription (see subscription_status_transitions)
CREATE TYPE payment_service.subscription_status AS ENUM (
'started',
'active',
'inactive',
'canceled',
'deleted',
'error'
);
-- key/value configuration store (e.g. 'jwt_secret', read by core.get_setting)
CREATE TABLE core.core_settings (
id uuid DEFAULT public.uuid_generate_v4() NOT NULL,
name character varying(100) NOT NULL,
value text NOT NULL,
created_at timestamp without time zone DEFAULT now() NOT NULL,
updated_at timestamp without time zone DEFAULT now() NOT NULL
);
COMMENT ON TABLE core.core_settings IS 'hold global settings for another services';
-- admin users of the platform service itself (not end users)
CREATE TABLE platform_service.users (
id uuid DEFAULT public.uuid_generate_v4() NOT NULL PRIMARY KEY,
email text NOT NULL,
password text NOT NULL,
name text NOT NULL,
created_at timestamp without time zone DEFAULT now() NOT NULL,
updated_at timestamp without time zone DEFAULT now() NOT NULL,
email_confirmed_at timestamp without time zone,
disabled_at timestamp without time zone,
CONSTRAINT users_email_check CHECK ((email ~* '^.+@.+\..+$'::text)),
CONSTRAINT users_name_check CHECK ((length(name) < 255)),
CONSTRAINT users_password_check CHECK ((length(password) < 512))
);
COMMENT ON TABLE platform_service.users IS 'Platform admin users';
-- tenants; token is matched against the request JWT by core.current_platform_id
CREATE TABLE platform_service.platforms (
id uuid DEFAULT public.uuid_generate_v4() NOT NULL PRIMARY KEY,
name text NOT NULL,
settings jsonb DEFAULT '{}'::jsonb NOT NULL,
token uuid DEFAULT public.uuid_generate_v4() NOT NULL,
created_at timestamp without time zone DEFAULT now() NOT NULL,
updated_at timestamp without time zone DEFAULT now() NOT NULL
);
COMMENT ON TABLE platform_service.platforms IS 'hold platforms names/configurations';
-- revocable API keys per platform (disabled_at marks revocation)
CREATE TABLE platform_service.platform_api_keys (
id uuid DEFAULT public.uuid_generate_v4() NOT NULL PRIMARY KEY,
platform_id uuid NOT NULL,
token text NOT NULL,
created_at timestamp without time zone DEFAULT now() NOT NULL,
disabled_at timestamp without time zone
);
-- join table: which admin users manage which platforms
CREATE TABLE platform_service.platform_users (
id uuid DEFAULT public.uuid_generate_v4() NOT NULL PRIMARY KEY,
user_id uuid NOT NULL,
platform_id uuid NOT NULL,
created_at timestamp without time zone DEFAULT now() NOT NULL,
updated_at timestamp without time zone DEFAULT now() NOT NULL
);
COMMENT ON TABLE platform_service.platform_users IS 'Manage platform user with platform';
-- end users, scoped to a platform; profile lives in the jsonb "data" column
CREATE TABLE community_service.users (
id uuid DEFAULT public.uuid_generate_v4() NOT NULL PRIMARY KEY,
platform_id uuid NOT NULL,
external_id text,
email public.email NOT NULL,
password text NOT NULL,
created_at timestamp without time zone DEFAULT now() NOT NULL,
updated_at timestamp without time zone DEFAULT now() NOT NULL,
data jsonb DEFAULT '{}'::jsonb NOT NULL,
key uuid DEFAULT public.uuid_generate_v4() NOT NULL,
CONSTRAINT users_password_check CHECK ((length(password) < 512))
);
COMMENT ON TABLE community_service.users IS 'Stores community users';
-- append-only history of user "data" snapshots (written before each update)
CREATE TABLE community_service.user_versions (
id uuid DEFAULT public.uuid_generate_v4() NOT NULL PRIMARY KEY,
user_id uuid NOT NULL,
data jsonb DEFAULT '{}'::jsonb NOT NULL,
created_at timestamp without time zone DEFAULT now() NOT NULL,
updated_at timestamp without time zone DEFAULT now() NOT NULL
);
-- one row per charge attempt; gateway_* columns cache the processor response
CREATE TABLE payment_service.catalog_payments (
id uuid DEFAULT public.uuid_generate_v4() NOT NULL PRIMARY KEY,
platform_id uuid NOT NULL,
project_id uuid NOT NULL,
user_id uuid NOT NULL,
subscription_id uuid,
reward_id uuid,
data jsonb NOT NULL,
gateway text NOT NULL,
gateway_cached_data jsonb,
created_at timestamp without time zone DEFAULT now() NOT NULL,
updated_at timestamp without time zone DEFAULT now() NOT NULL,
common_contract_data jsonb DEFAULT '{}'::jsonb NOT NULL,
gateway_general_data jsonb DEFAULT '{}'::jsonb NOT NULL,
status payment_service.payment_status DEFAULT 'pending' NOT NULL,
external_id text
);
COMMENT ON TABLE payment_service.catalog_payments IS 'Store initial payments data to sent to queue';
CREATE TABLE payment_service.catalog_payment_versions (
id uuid DEFAULT public.uuid_generate_v4() NOT NULL PRIMARY KEY,
catalog_payment_id uuid NOT NULL,
data jsonb DEFAULT '{}'::jsonb NOT NULL,
created_at timestamp without time zone DEFAULT now() NOT NULL,
updated_at timestamp without time zone DEFAULT now() NOT NULL
);
COMMENT ON TABLE payment_service.catalog_payment_versions IS 'store catalog payment versions when need to be updated';
-- audit trail of payment status changes (read by paid_transition_at)
CREATE TABLE payment_service.payment_status_transitions (
id uuid DEFAULT public.uuid_generate_v4() NOT NULL PRIMARY KEY,
catalog_payment_id uuid NOT NULL,
from_status payment_service.payment_status NOT NULL,
to_status payment_service.payment_status NOT NULL,
data jsonb DEFAULT '{}'::jsonb NOT NULL,
created_at timestamp without time zone DEFAULT now() NOT NULL,
updated_at timestamp without time zone DEFAULT now() NOT NULL
);
COMMENT ON TABLE payment_service.payment_status_transitions IS 'store the payment status changes';
-- recurring backing; checkout_data is replayed by subscriptions_charge
CREATE TABLE payment_service.subscriptions (
id uuid DEFAULT public.uuid_generate_v4() NOT NULL PRIMARY KEY,
platform_id uuid NOT NULL,
project_id uuid NOT NULL,
user_id uuid NOT NULL,
reward_id uuid,
credit_card_id uuid,
status payment_service.subscription_status DEFAULT 'started' NOT NULL,
created_at timestamp without time zone DEFAULT now() NOT NULL,
updated_at timestamp without time zone DEFAULT now() NOT NULL,
checkout_data jsonb DEFAULT '{}'::jsonb NOT NULL
);
COMMENT ON TABLE payment_service.subscriptions IS 'Store subscription transitions between charges';
CREATE TABLE payment_service.subscription_status_transitions (
id uuid DEFAULT public.uuid_generate_v4() NOT NULL PRIMARY KEY,
subscription_id uuid NOT NULL,
from_status payment_service.subscription_status NOT NULL,
to_status payment_service.subscription_status NOT NULL,
data jsonb DEFAULT '{}'::jsonb NOT NULL,
created_at timestamp without time zone DEFAULT now() NOT NULL,
updated_at timestamp without time zone DEFAULT now() NOT NULL
);
-- tokenized card references; no PAN is stored, only gateway_data from the PSP
CREATE TABLE payment_service.credit_cards (
id uuid DEFAULT public.uuid_generate_v4() NOT NULL PRIMARY KEY,
platform_id uuid NOT NULL,
user_id uuid NOT NULL,
gateway text NOT NULL,
gateway_data jsonb DEFAULT '{}'::jsonb NOT NULL,
data jsonb DEFAULT '{}'::jsonb NOT NULL,
created_at timestamp without time zone DEFAULT now() NOT NULL,
updated_at timestamp without time zone DEFAULT now() NOT NULL
);
COMMENT ON TABLE payment_service.credit_cards IS 'Store gateway credit_cards references';
-- crowdfunding projects; permalink restricted to word chars and dashes
CREATE TABLE project_service.projects (
id uuid DEFAULT public.uuid_generate_v4() NOT NULL PRIMARY KEY,
platform_id uuid NOT NULL,
user_id uuid NOT NULL,
name text NOT NULL,
mode project_service.project_mode NOT NULL,
key uuid DEFAULT public.uuid_generate_v4() NOT NULL,
data jsonb DEFAULT '{}'::jsonb NOT NULL,
permalink text NOT NULL,
status text DEFAULT 'draft'::text NOT NULL,
external_id text,
CONSTRAINT chk_permalink CHECK ((permalink ~* '\A(\w|-)*\Z'::text))
);
COMMENT ON TABLE project_service.projects IS 'store project data for platforms';
CREATE TABLE project_service.project_versions (
id uuid DEFAULT public.uuid_generate_v4() NOT NULL PRIMARY KEY,
project_id uuid NOT NULL,
data jsonb DEFAULT '{}'::jsonb NOT NULL,
created_at timestamp without time zone DEFAULT now() NOT NULL,
updated_at timestamp without time zone DEFAULT now() NOT NULL
);
COMMENT ON TABLE project_service.project_versions IS 'Store project data versions';
CREATE TABLE project_service.reward_versions (
id uuid DEFAULT public.uuid_generate_v4() NOT NULL PRIMARY KEY,
reward_id uuid NOT NULL,
data jsonb DEFAULT '{}'::jsonb NOT NULL,
created_at timestamp without time zone DEFAULT now() NOT NULL,
updated_at timestamp without time zone DEFAULT now() NOT NULL
);
-- project rewards; content is schemaless jsonb in "data"
CREATE TABLE project_service.rewards (
id uuid DEFAULT public.uuid_generate_v4() NOT NULL PRIMARY KEY,
project_id uuid NOT NULL,
data jsonb DEFAULT '{}'::jsonb NOT NULL,
created_at timestamp without time zone DEFAULT now() NOT NULL,
updated_at timestamp without time zone DEFAULT now() NOT NULL,
platform_id uuid NOT NULL,
external_id text
);
-- ---------------------------------------------------------------------------
-- Unique constraints: external_id values and emails are unique per platform,
-- not globally, matching the multi-tenant design.
-- ---------------------------------------------------------------------------
ALTER TABLE ONLY community_service.users
ADD CONSTRAINT uidx_platform_email UNIQUE (platform_id, email);
ALTER TABLE ONLY community_service.users
ADD CONSTRAINT uniq_users_ext_id UNIQUE (platform_id, external_id);
ALTER TABLE ONLY community_service.users
ADD CONSTRAINT users_key_key UNIQUE (key);
ALTER TABLE ONLY core.core_settings
ADD CONSTRAINT core_settings_name_key UNIQUE (name);
ALTER TABLE ONLY payment_service.catalog_payments
ADD CONSTRAINT uniq_payments_ext_id UNIQUE (platform_id, external_id);
ALTER TABLE ONLY platform_service.platform_api_keys
ADD CONSTRAINT platform_api_keys_token_key UNIQUE (token);
ALTER TABLE ONLY platform_service.platforms
ADD CONSTRAINT platforms_token_key UNIQUE (token);
ALTER TABLE ONLY platform_service.users
ADD CONSTRAINT uidx_users_email UNIQUE (email);
ALTER TABLE ONLY platform_service.platform_users
ADD CONSTRAINT uuidx_user_and_platform UNIQUE (user_id, platform_id);
ALTER TABLE ONLY project_service.projects
ADD CONSTRAINT projects_key_key UNIQUE (key);
ALTER TABLE ONLY project_service.projects
ADD CONSTRAINT uniq_projects_ext_id UNIQUE (platform_id, external_id);
ALTER TABLE ONLY project_service.rewards
ADD CONSTRAINT uniq_rewards_ext_id UNIQUE (platform_id, external_id);
ALTER TABLE ONLY project_service.projects
ADD CONSTRAINT unq_permalink_on_platform UNIQUE (platform_id, permalink);
-- ---------------------------------------------------------------------------
-- Foreign keys (all default to ON DELETE NO ACTION).
-- ---------------------------------------------------------------------------
ALTER TABLE ONLY community_service.user_versions
ADD CONSTRAINT user_versions_user_id_fkey FOREIGN KEY (user_id) REFERENCES community_service.users(id);
ALTER TABLE ONLY community_service.users
ADD CONSTRAINT users_platform_id_fkey FOREIGN KEY (platform_id) REFERENCES platform_service.platforms(id);
ALTER TABLE ONLY payment_service.catalog_payment_versions
ADD CONSTRAINT catalog_payment_versions_catalog_payment_id_fkey FOREIGN KEY (catalog_payment_id) REFERENCES payment_service.catalog_payments(id);
ALTER TABLE ONLY payment_service.catalog_payments
ADD CONSTRAINT catalog_payments_platform_id_fkey FOREIGN KEY (platform_id) REFERENCES platform_service.platforms(id);
ALTER TABLE ONLY payment_service.catalog_payments
ADD CONSTRAINT catalog_payments_project_id_fkey FOREIGN KEY (project_id) REFERENCES project_service.projects(id);
ALTER TABLE ONLY payment_service.catalog_payments
ADD CONSTRAINT catalog_payments_reward_id_fkey FOREIGN KEY (reward_id) REFERENCES project_service.rewards(id);
ALTER TABLE ONLY payment_service.catalog_payments
ADD CONSTRAINT catalog_payments_subscription_id_fkey FOREIGN KEY (subscription_id) REFERENCES payment_service.subscriptions(id);
ALTER TABLE ONLY payment_service.catalog_payments
ADD CONSTRAINT catalog_payments_user_id_fkey FOREIGN KEY (user_id) REFERENCES community_service.users(id);
ALTER TABLE ONLY payment_service.credit_cards
ADD CONSTRAINT credit_cards_platform_id_fkey FOREIGN KEY (platform_id) REFERENCES platform_service.platforms(id);
ALTER TABLE ONLY payment_service.credit_cards
ADD CONSTRAINT credit_cards_user_id_fkey FOREIGN KEY (user_id) REFERENCES community_service.users(id);
ALTER TABLE ONLY payment_service.payment_status_transitions
ADD CONSTRAINT payment_status_transitions_catalog_payment_id_fkey FOREIGN KEY (catalog_payment_id) REFERENCES payment_service.catalog_payments(id);
ALTER TABLE ONLY payment_service.subscription_status_transitions
ADD CONSTRAINT subscription_status_transitions_subscription_id_fkey FOREIGN KEY (subscription_id) REFERENCES payment_service.subscriptions(id);
ALTER TABLE ONLY payment_service.subscriptions
ADD CONSTRAINT subscriptions_credit_card_id_fkey FOREIGN KEY (credit_card_id) REFERENCES payment_service.credit_cards(id);
ALTER TABLE ONLY payment_service.subscriptions
ADD CONSTRAINT subscriptions_platform_id_fkey FOREIGN KEY (platform_id) REFERENCES platform_service.platforms(id);
ALTER TABLE ONLY payment_service.subscriptions
ADD CONSTRAINT subscriptions_project_id_fkey FOREIGN KEY (project_id) REFERENCES project_service.projects(id);
ALTER TABLE ONLY payment_service.subscriptions
ADD CONSTRAINT subscriptions_reward_id_fkey FOREIGN KEY (reward_id) REFERENCES project_service.rewards(id);
ALTER TABLE ONLY payment_service.subscriptions
ADD CONSTRAINT subscriptions_user_id_fkey FOREIGN KEY (user_id) REFERENCES community_service.users(id);
ALTER TABLE ONLY platform_service.platform_api_keys
ADD CONSTRAINT platform_api_keys_platform_id_fkey FOREIGN KEY (platform_id) REFERENCES platform_service.platforms(id);
ALTER TABLE ONLY platform_service.platform_users
ADD CONSTRAINT platform_users_user_id_fkey FOREIGN KEY (user_id) REFERENCES platform_service.users(id);
ALTER TABLE ONLY project_service.project_versions
ADD CONSTRAINT project_versions_project_id_fkey FOREIGN KEY (project_id) REFERENCES project_service.projects(id);
ALTER TABLE ONLY project_service.projects
ADD CONSTRAINT projects_platform_id_fkey FOREIGN KEY (platform_id) REFERENCES platform_service.platforms(id);
ALTER TABLE ONLY project_service.projects
ADD CONSTRAINT projects_user_id_fkey FOREIGN KEY (user_id) REFERENCES community_service.users(id);
ALTER TABLE ONLY project_service.reward_versions
ADD CONSTRAINT reward_versions_reward_id_fkey FOREIGN KEY (reward_id) REFERENCES project_service.rewards(id);
ALTER TABLE ONLY project_service.rewards
ADD CONSTRAINT rewards_platform_id_fkey FOREIGN KEY (platform_id) REFERENCES platform_service.platforms(id);
ALTER TABLE ONLY project_service.rewards
ADD CONSTRAINT rewards_project_id_fkey FOREIGN KEY (project_id) REFERENCES project_service.projects(id);
CREATE FUNCTION core.url_encode(data bytea) RETURNS text
    LANGUAGE sql
    AS $$
    -- base64url without padding: drop the newlines encode() inserts,
    -- swap '+'/'/' for '-'/'_' and delete the '=' padding characters.
    SELECT translate(replace(encode(data, 'base64'), E'\n', ''), '+/=', '-_');
$$;
CREATE FUNCTION core.algorithm_sign(signables text, secret text, algorithm text) RETURNS text
    LANGUAGE sql
    AS $$
    -- HMAC-sign the signable string and base64url-encode the result.
    -- An unrecognized algorithm maps to '' so that hmac() raises an error
    -- instead of silently producing a signature with a default digest.
    SELECT core.url_encode(hmac(signables, secret,
        CASE algorithm
            WHEN 'HS256' THEN 'sha256'
            WHEN 'HS384' THEN 'sha384'
            WHEN 'HS512' THEN 'sha512'
            ELSE ''
        END));
$$;
CREATE FUNCTION core.current_platform_token() RETURNS uuid
    LANGUAGE plpgsql STABLE
    AS $$
-- Resolve the caller's platform token, preferring the JWT claim and falling
-- back to the "platform-code" request header. Returns NULL when neither is
-- present or the value is not a valid uuid.
BEGIN
    RETURN COALESCE(
        current_setting('request.jwt.claim.platform_token', true)::uuid,
        -- missing_ok = true: an absent header yields NULL instead of
        -- raising and bouncing through the exception handler
        current_setting('request.header.platform-code', true)::uuid);
EXCEPTION
    WHEN others THEN
        -- malformed values (non-uuid text) degrade to "no platform";
        -- fixed: was NULL::integer in a uuid-returning function
        RETURN NULL::uuid;
END
$$;
COMMENT ON FUNCTION core.current_platform_token() IS 'Get platform uuid token from jwt';
CREATE FUNCTION core.current_platform_id() RETURNS uuid
    LANGUAGE sql STABLE
    AS $$
    -- Map the request's platform token onto the platforms primary key.
    select p.id
    from platform_service.platforms as p
    where p.token = core.current_platform_token();
$$;
CREATE FUNCTION core.current_user_id() RETURNS uuid
LANGUAGE plpgsql STABLE
AS $$
-- Read the user_id claim from the request JWT; empty string counts as unset.
BEGIN
RETURN nullif(current_setting('request.jwt.claim.user_id'), '')::uuid;
EXCEPTION
WHEN others THEN
-- unset setting or malformed uuid -> anonymous (NULL)
RETURN NULL::uuid;
END
$$;
COMMENT ON FUNCTION core.current_user_id() IS 'Returns the user_id decoded on jwt';
CREATE FUNCTION core.force_any_of_roles(roles text[]) RETURNS void
    LANGUAGE plpgsql STABLE
    AS $_$
begin
    -- guard clause: nothing to do when the current role is permitted
    if core.has_any_of_roles($1) then
        return;
    end if;
    raise exception insufficient_privilege;
end;
$_$;
COMMENT ON FUNCTION core.force_any_of_roles(roles text[]) IS 'raise insufficient_privilege when current role not in any of requested roles';
CREATE FUNCTION core.force_ip_address() RETURNS text
LANGUAGE sql
AS $$
-- no missing_ok argument: current_setting raises when the header is absent,
-- which is the "force" behavior (contrast core.request_ip_address)
select current_setting('request.header.x-forwarded-for');
$$;
COMMENT ON FUNCTION core.force_ip_address() IS 'Get ip address form request header or raise error';
CREATE FUNCTION core.get_setting(character varying) RETURNS text
    LANGUAGE sql STABLE
    AS $_$
    -- Look up a single configuration value by name; NULL when absent.
    select cs.value
    from core.core_settings as cs
    where cs.name = $1;
$_$;
COMMENT ON FUNCTION core.get_setting(character varying) IS 'Get a value from a core settings on database';
CREATE FUNCTION core.has_any_of_roles(roles text[]) RETURNS boolean
LANGUAGE sql STABLE
AS $$
-- membership test of the session role against the allowed list
select current_role = ANY(roles);
$$;
COMMENT ON FUNCTION core.has_any_of_roles(roles text[]) IS 'check if current role in any of requested roles';
CREATE FUNCTION core.is_owner_or_admin(uuid) RETURNS boolean
LANGUAGE sql STABLE
AS $_$
-- NOTE(review): uses current_user here while sibling checks use current_role;
-- these differ under SECURITY DEFINER / SET ROLE — confirm which is intended.
SELECT
core.current_user_id() = $1
OR current_user = 'platform_user';
$_$;
COMMENT ON FUNCTION core.is_owner_or_admin(uuid) IS 'Check if current_role is admin or passed id match with current_user_id';
CREATE FUNCTION core.project_exists_on_platform(project_id uuid, platform_id uuid) RETURNS boolean
    LANGUAGE sql STABLE
    AS $_$
    -- true when the given project belongs to the given platform
    -- ($1/$2 kept to avoid ambiguity with same-named columns)
    select exists (
        select 1
        from project_service.projects as p
        where p.id = $1
          and p.platform_id = $2
    );
$_$;
COMMENT ON FUNCTION core.project_exists_on_platform(project_id uuid, platform_id uuid) IS 'check if project id exists on platform';
CREATE FUNCTION core.request_ip_address() RETURNS text
LANGUAGE sql
AS $$
-- missing_ok = true: NULL when the header is absent (non-raising variant
-- of core.force_ip_address)
select current_setting('request.header.x-forwarded-for', true);
$$;
-- Build a signed JWT: base64url(header) . base64url(payload) . hmac signature
CREATE FUNCTION core.sign(payload json, secret text, algorithm text DEFAULT 'HS256'::text) RETURNS text
LANGUAGE sql
AS $$
WITH
-- fixed JOSE header advertising the chosen algorithm
header AS (
SELECT core.url_encode(convert_to('{"alg":"' || algorithm || '","typ":"JWT"}', 'utf8')) AS data
),
-- claims, serialized and base64url-encoded
payload AS (
SELECT core.url_encode(convert_to(payload::text, 'utf8')) AS data
),
-- "header.payload" is what gets signed
signables AS (
SELECT header.data || '.' || payload.data AS data FROM header, payload
)
SELECT
signables.data || '.' ||
core.algorithm_sign(signables.data, secret, algorithm) FROM signables;
$$;
CREATE FUNCTION core.gen_jwt_token(json) RETURNS core.jwt_token
LANGUAGE sql STABLE
AS $_$
-- sign with the shared secret stored in core.core_settings ('jwt_secret')
select core.sign($1::json, core.get_setting('jwt_secret'));
$_$;
COMMENT ON FUNCTION core.gen_jwt_token(json) IS 'Generate a signed jwt';
CREATE FUNCTION core.url_decode(data text) RETURNS bytea
    LANGUAGE sql
    AS $$
    -- Reverse of core.url_encode: restore '+'/'/' and re-append '=' padding
    -- up to a multiple of 4 before handing off to the base64 decoder.
    -- (4 - len % 4) % 4 yields 0 padding when the length is already aligned.
    SELECT decode(
        translate(data, '-_', '+/') ||
        repeat('=', (4 - length(data) % 4) % 4),
        'base64');
$$;
CREATE FUNCTION core.user_exists_on_platform(user_id uuid, platform_id uuid) RETURNS boolean
    LANGUAGE sql STABLE
    AS $_$
    -- true when the given community user belongs to the given platform
    -- ($1/$2 kept to avoid ambiguity with same-named columns)
    select exists (
        select 1
        from community_service.users as u
        where u.id = $1
          and u.platform_id = $2
    );
$_$;
COMMENT ON FUNCTION core.user_exists_on_platform(user_id uuid, platform_id uuid) IS 'Check if user_id exists on platform';
-- Split a JWT into header/payload/signature, decode the JSON parts and
-- recompute the signature to report validity.
CREATE FUNCTION core.verify(token text, secret text, algorithm text DEFAULT 'HS256'::text) RETURNS TABLE(header json, payload json, valid boolean)
LANGUAGE sql
AS $$
SELECT
convert_from(core.url_decode(r[1]), 'utf8')::json AS header,
convert_from(core.url_decode(r[2]), 'utf8')::json AS payload,
-- valid iff re-signing header.payload reproduces the presented signature
r[3] = core.algorithm_sign(r[1] || '.' || r[2], secret, algorithm) AS valid
FROM regexp_split_to_array(token, '\.') r;
$$;
-- Normalize an arbitrary user payload into the canonical profile shape
-- (name, email, document, address, phone, bank_account, metadata).
-- Punctuation is stripped from document_number; unknown keys are dropped.
CREATE FUNCTION community_service._serialize_user_basic_data(json) RETURNS json
LANGUAGE plpgsql IMMUTABLE
AS $_$
declare
_result json;
begin
select json_build_object(
'current_ip', ($1->>'current_ip')::text,
'name', ($1->>'name')::text,
'email', ($1->>'email')::email,
-- strip '.', '/' and '-' so CPF/CNPJ formats compare equal
'document_number', replace(replace(replace(($1->>'document_number')::text, '.', ''), '/', ''), '-', ''),
'born_at', ($1->>'born_at')::date,
'document_type', ($1->>'document_type')::text,
'legal_account_type', ($1->>'legal_account_type')::text,
'address', json_build_object(
'street', ($1->'address'->>'street')::text,
'street_number', ($1->'address'->>'street_number')::text,
'neighborhood', ($1->'address'->>'neighborhood')::text,
'zipcode', ($1->'address'->>'zipcode')::text,
'country', ($1->'address'->>'country')::text,
'state', ($1->'address'->>'state')::text,
'city', ($1->'address'->>'city')::text,
'complementary', ($1->'address'->>'complementary')::text
),
'phone', json_build_object(
'ddi', ($1->'phone'->>'ddi')::text,
'ddd', ($1->'phone'->>'ddd')::text,
'number', ($1->'phone'->>'number')::text
),
'bank_account', json_build_object(
'bank_code', ($1->'bank_account'->>'bank_code')::text,
'account', ($1->'bank_account'->>'account')::text,
'account_digit', ($1->'bank_account'->>'account_digit')::text,
'agency', ($1->'bank_account'->>'agency')::text,
'agency_digit', ($1->'bank_account'->>'agency_digit')::text
),
'metadata', ($1->>'metadata')::json
) into _result;
return _result;
end;
$_$;
-- Two-argument overload: same canonical shape, but every field falls back to
-- with_default ($2, typically the user's stored data) when absent in $1.
-- current_ip intentionally has no fallback — it must come from the request.
CREATE FUNCTION community_service._serialize_user_basic_data(json, with_default json) RETURNS json
LANGUAGE plpgsql IMMUTABLE
AS $_$
declare
_result json;
begin
select json_build_object(
'current_ip', ($1->>'current_ip')::text,
'name', coalesce(($1->>'name')::text, ($2->>'name')::text),
'email', coalesce(($1->>'email')::email, ($2->>'email')::email),
'document_number', replace(replace(replace(coalesce(($1->>'document_number')::text, ($2->>'document_number')::text), '.', ''), '/', ''), '-', ''),
'born_at', coalesce(($1->>'born_at')::date, ($2->>'born_at')::date),
'document_type', coalesce(($1->>'document_type')::text, ($2->>'document_type')::text),
'legal_account_type', coalesce(($1->>'legal_account_type')::text, ($2->>'legal_account_type')::text),
'address', json_build_object(
'street', coalesce(($1->'address'->>'street')::text, ($2->'address'->>'street')::text),
'street_number', coalesce(($1->'address'->>'street_number')::text, ($2->'address'->>'street_number')::text),
'neighborhood', coalesce(($1->'address'->>'neighborhood')::text, ($2->'address'->>'neighborhood')::text),
'zipcode', coalesce(($1->'address'->>'zipcode')::text, ($2->'address'->>'zipcode')::text),
'country', coalesce(($1->'address'->>'country')::text, ($2->'address'->>'country')::text),
'state', coalesce(($1->'address'->>'state')::text, ($2->'address'->>'state')::text),
'city', coalesce(($1->'address'->>'city')::text, ($2->'address'->>'city')::text),
'complementary', coalesce(($1->'address'->>'complementary')::text, ($2->'address'->>'complementary')::text)
),
'phone', json_build_object(
'ddi', coalesce(($1->'phone'->>'ddi')::text, ($2->'phone'->>'ddi')::text),
'ddd', coalesce(($1->'phone'->>'ddd')::text, ($2->'phone'->>'ddd')::text),
'number', coalesce(($1->'phone'->>'number')::text, ($2->'phone'->>'number')::text)
),
'bank_account', json_build_object(
'bank_code', coalesce(($1->'bank_account'->>'bank_code')::text, ($2->'bank_account'->>'bank_code')::text),
'account', coalesce(($1->'bank_account'->>'account')::text, ($2->'bank_account'->>'account')::text),
'account_digit', coalesce(($1->'bank_account'->>'account_digit')::text, ($2->'bank_account'->>'account_digit')::text),
'agency', coalesce(($1->'bank_account'->>'agency')::text, ($2->'bank_account'->>'agency')::text),
'agency_digit', coalesce(($1->'bank_account'->>'agency_digit')::text, ($2->'bank_account'->>'agency_digit')::text)
),
'metadata', coalesce(($1->>'metadata')::json, ($2->>'metadata')::json)
) into _result;
return _result;
end;
$_$;
-- Mint a short-lived (2h) 'scoped_user' JWT for one community user of the
-- calling platform. Only callable by the 'platform_user' role; raises
-- 'invalid user id' when the user does not belong to the current platform.
CREATE FUNCTION community_service_api.create_scoped_user_session(id uuid) RETURNS json
    LANGUAGE plpgsql STABLE
    AS $_$
declare
    -- fixed: removed unused local "_platform platform_service.platforms"
    _user community_service.users;
    _jwt core.jwt_token;
    _result json;
begin
    -- ensure that roles come from any permitted
    perform core.force_any_of_roles('{platform_user}');
    -- the target user must exist on the caller's platform
    select * from community_service.users cu
        where cu.platform_id = core.current_platform_id()
        and cu.id = $1
        into _user;
    if _user is null then
        raise exception 'invalid user id';
    end if;
    -- token expires two hours from now
    select core.gen_jwt_token(json_build_object(
        'role', 'scoped_user',
        'user_id', _user.id,
        'platform_token', core.current_platform_token(),
        'exp', extract(epoch from now())::integer + (60*60)*2
    )) into _jwt;
    select json_build_object(
        'token', _jwt.token
    ) into _result;
    return _result;
end;
$_$;
COMMENT ON FUNCTION community_service_api.create_scoped_user_session(id uuid) IS 'Create a token for scoped user in community';
-- Upsert endpoint for community users.
-- platform_user + id  -> update that platform's user;
-- scoped_user         -> update the JWT's own user;
-- platform_user, no id -> create a new user on the current platform.
-- Every write also appends a snapshot row to user_versions.
CREATE FUNCTION community_service_api."user"(data json) RETURNS json
LANGUAGE plpgsql
AS $_$
declare
_user community_service.users;
_platform platform_service.platforms;
_refined jsonb;
_result json;
_passwd text;
_version community_service.user_versions;
begin
-- ensure that roles come from any permitted
perform core.force_any_of_roles('{platform_user,scoped_user}');
-- get user if id is provided or scoped_user
if current_role = 'platform_user' and ($1->>'id')::uuid is not null then
select * from community_service.users
where id = ($1->>'id')::uuid
and platform_id = core.current_platform_id()
into _user;
if _user.id is null then
raise 'user not found';
end if;
elsif current_role = 'scoped_user' then
select * from community_service.users
where id = core.current_user_id()
and platform_id = core.current_platform_id()
into _user;
if _user.id is null then
raise 'user not found';
end if;
end if;
-- insert current_ip into refined
_refined := jsonb_set($1::jsonb, '{current_ip}'::text[], to_jsonb(coalesce(($1->>'current_ip')::text, core.force_ip_address())));
-- generate user basic data structure with received json
if _user.id is not null then
-- NOTE(review): this re-serializes from the raw $1, discarding the
-- current_ip injected into _refined just above — confirm whether
-- current_ip should survive on the update path.
_refined := community_service._serialize_user_basic_data($1, _user.data::json);
-- insert old user data to version
insert into community_service.user_versions(user_id, data)
values (_user.id, row_to_json(_user.*)::jsonb)
returning * into _version;
-- update user data
update community_service.users
set data = _refined,
email = _refined->>'email'
where id = _user.id
returning * into _user;
else
-- generate user basic data
_refined := community_service._serialize_user_basic_data($1);
-- check if password already encrypted
_passwd := (case when ($1->>'password_encrypted'::text) = 'true' then
($1->>'password')::text
else
crypt(($1->>'password')::text, gen_salt('bf'))
end);
-- insert user in current platform
insert into community_service.users (external_id, platform_id, email, password, data, created_at, updated_at)
values (($1->>'external_id')::text,
core.current_platform_id(),
($1)->>'email',
_passwd,
_refined::jsonb,
coalesce(($1->>'created_at')::timestamp, now()),
coalesce(($1->>'updated_at')::timestamp, now())
)
returning * into _user;
-- insert user version
insert into community_service.user_versions(user_id, data)
values (_user.id, row_to_json(_user.*)::jsonb)
returning * into _version;
end if;
select json_build_object(
'id', _user.id,
'old_version_id', _version.id,
'data', _refined
) into _result;
return _result;
end;
$_$;
CREATE FUNCTION core_validator.is_empty(_value text) RETURNS boolean
    LANGUAGE sql IMMUTABLE
    AS $$
    -- NULL and all-space strings both count as empty
    select coalesce(btrim(_value, ' '), '') = '';
$$;
COMMENT ON FUNCTION core_validator.is_empty(_value text) IS 'check if a text is empty';
CREATE FUNCTION core_validator.raise_when_empty(_value text, _label text) RETURNS text
    LANGUAGE plpgsql IMMUTABLE
    AS $$
begin
    -- delegate the emptiness rule to core_validator.is_empty so both
    -- validators cannot drift apart (previously duplicated inline)
    if core_validator.is_empty(_value) then
        raise 'missing field %', _label;
    end if;
    -- hand back the space-trimmed value
    return btrim(_value, ' ');
end;
$$;
COMMENT ON FUNCTION core_validator.raise_when_empty(_value text, _label text) IS 'Raise when value::text is missing';
-- Flatten a Pagar.me webhook/API payload ({'transaction': ..., 'payables': [...]})
-- into the gateway-agnostic payment structure consumed by the payment service.
CREATE FUNCTION payment_service.__extractor_for_pagarme(gateway_data json) RETURNS json
LANGUAGE plpgsql STABLE
AS $_$
declare
_transaction json;
_payables json;
_payable_data record;
begin
_transaction := ($1->>'transaction')::json;
_payables := ($1->>'payables')::json;
-- build basic payable data to reuse on default structure
-- (total fee, first/last compensation dates, per-payable detail array)
select sum((p->>'fee')::decimal) as total_fee,
max((p->>'payment_date')) as last_payable_date,
min((p->>'payment_date')) as first_payable_date,
array_to_json(array_agg(json_build_object(
'id', (p->>'id')::text,
'type', (p->>'type')::text,
'status', (p->>'status')::text,
'installment', (p->>'installment')::integer,
'payment_date', (p->>'payment_date')::timestamp,
'transaction_id', (p->>'transaction_id')::text,
'anticipation_fee', (p->>'anticipation_fee')::text
))) as payables
from json_array_elements(_payables) as p
into _payable_data;
-- build payment basic structure from gateway
return json_build_object(
'gateway_ip', _transaction ->> 'ip'::text,
'gateway_id', _transaction ->> 'id'::text,
'gateway_cost', (_transaction ->> 'cost')::decimal,
'gateway_payment_method', (_transaction ->> 'payment_method')::text,
'gateway_status', (_transaction ->> 'status')::text,
'gateway_status_reason', (_transaction ->> 'status_reason')::text,
'gateway_refuse_reason', (_transaction ->> 'refuse_reason')::text,
'gateway_acquirer_response_code', (_transaction ->> 'acquirer_response_code')::text,
'boleto_url', (_transaction ->> 'boleto_url')::text,
'boleto_barcode', (_transaction ->> 'boleto_barcode')::text,
'boleto_expiration_date', (_transaction ->> 'boleto_expiration_date')::timestamp,
'installments', (_transaction ->> 'installments')::text,
'customer_name', (_transaction -> 'customer' ->> 'name')::text,
'customer_email', (_transaction -> 'customer' ->> 'email')::text,
'customer_document_number', (_transaction -> 'customer' ->> 'document_number')::text,
'customer_document_type', (_transaction -> 'customer' ->> 'document_type')::text,
'card_id', (_transaction -> 'card' ->> 'id')::text,
'card_holder_name', (_transaction -> 'card' ->> 'holder_name')::text,
'card_first_digits', (_transaction -> 'card' ->> 'first_digits')::text,
'card_last_digits', (_transaction -> 'card' ->> 'last_digits')::text,
'card_fingerprint', (_transaction -> 'card' ->> 'fingerprint')::text,
'card_country', (_transaction -> 'card' ->> 'country')::text,
'card_brand', (_transaction -> 'card' ->> 'brand')::text,
'payable_total_fee', _payable_data.total_fee::decimal,
'payable_first_compensation_date', _payable_data.first_payable_date::timestamp,
'payable_last_compensation_date', _payable_data.last_payable_date::timestamp,
'payables', _payable_data.payables::json
);
end;
$_$;
COMMENT ON FUNCTION payment_service.__extractor_for_pagarme(gateway_data json) IS 'generate basic gateway_data structure for gateways';
CREATE FUNCTION payment_service._extract_from_gateway_to_data(gateway text, gateway_data json) RETURNS json
    LANGUAGE plpgsql STABLE
    AS $_$
begin
    -- dispatch to the gateway-specific extractor; unknown gateways yield NULL
    if $1 = 'pagarme' then
        return payment_service.__extractor_for_pagarme($2);
    end if;
    return null::json;
end;
$_$;
COMMENT ON FUNCTION payment_service._extract_from_gateway_to_data(gateway text, gateway_data json) IS 'route gateway response data to a extractor to generate default structure over payment';
-- Validate and normalize a one-off payment payload; raise_when_empty aborts
-- with 'missing field <label>' on any required field that is absent/blank.
CREATE FUNCTION payment_service._serialize_payment_basic_data(json) RETURNS json
LANGUAGE plpgsql STABLE
AS $_$
declare
_result json;
begin
select json_build_object(
'current_ip', core_validator.raise_when_empty(($1->>'current_ip')::text, 'ip_address')::text,
'anonymous', core_validator.raise_when_empty(($1->>'anonymous')::text, 'anonymous')::boolean,
'amount', core_validator.raise_when_empty((($1->>'amount')::decimal)::text, 'amount')::decimal,
'payment_method', core_validator.raise_when_empty(lower(($1->>'payment_method')::text), 'payment_method'),
'customer', json_build_object(
'name', core_validator.raise_when_empty(($1->'customer'->>'name')::text, 'name'),
'email', core_validator.raise_when_empty(($1->'customer'->>'email')::text, 'email'),
'document_number', core_validator.raise_when_empty(($1->'customer'->>'document_number')::text, 'document_number'),
'address', json_build_object(
'street', core_validator.raise_when_empty(($1->'customer'->'address'->>'street')::text, 'street'),
'street_number', core_validator.raise_when_empty(($1->'customer'->'address'->>'street_number')::text, 'street_number'),
'neighborhood', core_validator.raise_when_empty(($1->'customer'->'address'->>'neighborhood')::text, 'neighborhood'),
'zipcode', core_validator.raise_when_empty(($1->'customer'->'address'->>'zipcode')::text, 'zipcode'),
'country', core_validator.raise_when_empty(($1->'customer'->'address'->>'country')::text, 'country'),
'state', core_validator.raise_when_empty(($1->'customer'->'address'->>'state')::text, 'state'),
'city', core_validator.raise_when_empty(($1->'customer'->'address'->>'city')::text, 'city'),
-- complementary is the only optional address field
'complementary', ($1->'customer'->'address'->>'complementary')::text
),
'phone', json_build_object(
'ddi', core_validator.raise_when_empty(($1->'customer'->'phone'->>'ddi')::text, 'phone_ddi'),
'ddd', core_validator.raise_when_empty(($1->'customer'->'phone'->>'ddd')::text, 'phone_ddd'),
'number', core_validator.raise_when_empty(($1->'customer'->'phone'->>'number')::text, 'phone_number')
)
)
) into _result;
return _result;
end;
$_$;
-- Subscription variant: no name/email/document (taken from the stored user
-- at charge time), amount is an integer and is_international defaults false.
CREATE FUNCTION payment_service._serialize_subscription_basic_data(json) RETURNS json
LANGUAGE plpgsql IMMUTABLE
AS $_$
declare
_result json;
begin
select json_build_object(
'current_ip', ($1->>'current_ip')::text,
'is_international', coalesce(($1->>'is_international')::boolean, false),
'amount', core_validator.raise_when_empty((($1->>'amount')::integer)::text, 'amount'),
'payment_method', core_validator.raise_when_empty(lower(($1->>'payment_method')::text), 'payment_method'),
'customer', json_build_object(
'address', json_build_object(
'street', core_validator.raise_when_empty(($1->'customer'->'address'->>'street')::text, 'street'),
'street_number', core_validator.raise_when_empty(($1->'customer'->'address'->>'street_number')::text, 'street_number'),
'neighborhood', core_validator.raise_when_empty(($1->'customer'->'address'->>'neighborhood')::text, 'neighborhood'),
'zipcode', core_validator.raise_when_empty(($1->'customer'->'address'->>'zipcode')::text, 'zipcode'),
'country', core_validator.raise_when_empty(($1->'customer'->'address'->>'country')::text, 'country'),
'state', core_validator.raise_when_empty(($1->'customer'->'address'->>'state')::text, 'state'),
'city', core_validator.raise_when_empty(($1->'customer'->'address'->>'city')::text, 'city'),
'complementary', ($1->'customer'->'address'->>'complementary')::text
),
'phone', json_build_object(
'ddi', core_validator.raise_when_empty(($1->'customer'->'phone'->>'ddi')::text, 'phone_ddi'),
'ddd', core_validator.raise_when_empty(($1->'customer'->'phone'->>'ddd')::text, 'phone_ddd'),
'number', core_validator.raise_when_empty(($1->'customer'->'phone'->>'number')::text, 'phone_number')
)
)
) into _result;
return _result;
end;
$_$;
CREATE FUNCTION payment_service.paid_transition_at(payment payment_service.catalog_payments) RETURNS timestamp without time zone
    LANGUAGE sql STABLE
    AS $_$
    -- Timestamp of the most recent transition of this payment into 'paid'.
    -- fixed: order by created_at — ids are random v4 uuids, so "order by id"
    -- did not reflect insertion order and could pick an arbitrary row.
    select created_at from payment_service.payment_status_transitions
    where catalog_payment_id = $1.id
    and to_status = 'paid'::payment_service.payment_status
    order by created_at desc limit 1;
$_$;
CREATE FUNCTION payment_service.subscriptions_charge(time_interval interval DEFAULT '1 mon'::interval) RETURNS json
    LANGUAGE plpgsql
    AS $_$
declare
    _result json;
    _subscription payment_service.subscriptions;
    _last_paid_payment payment_service.catalog_payments;
    _new_payment payment_service.catalog_payments;
    _refined jsonb;
    _affected_subscriptions_ids uuid[];
    _card_id text;
    _user community_service.users;
    _total_affected integer;
begin
    -- Recurring-billing sweep: for every active subscription whose last paid
    -- payment is older than time_interval (and which has no payment still in
    -- flight), create a new catalog_payment from the stored checkout_data and
    -- notify the external processor. Returns {total_affected, affected_ids}.
    _total_affected := 0;
    -- select active subscriptions whose last paid payment is older than the
    -- interval and that have no pending payment after it
    for _subscription IN (select s.*
        from payment_service.subscriptions s
        left join lateral (
            -- most recent payment in 'paid' status for this subscription
            select
                cp.*
            from payment_service.catalog_payments cp
            where cp.subscription_id = s.id
            and cp.status = 'paid'
            order by id desc limit 1
        ) as last_paid_payment on true
        left join lateral (
            -- most recent payment issued AFTER the last paid one (there can be
            -- a pending/refused attempt following a paid payment); used below
            -- to ensure no charge is still in flight
            select
                cp.*
            from payment_service.catalog_payments cp
            where cp.subscription_id = s.id
            and id > last_paid_payment.id
            order by id desc limit 1
        ) as last_payment on true
        where last_paid_payment.id is not null
        -- due: last paid transition plus the charge interval has elapsed
        and (payment_service.paid_transition_at(last_paid_payment.*) + $1::interval) <= now()
        -- only recharge when the newest attempt is settled (refused or paid)
        and (last_payment.id is null or last_payment.status in ('refused', 'paid'))
        -- only subscriptions currently active
        and s.status in ('active'))
    loop
        -- re-fetch the last paid payment into a typed variable (gateway reuse below)
        select * from payment_service.catalog_payments
        where subscription_id = _subscription.id
        and status = 'paid'
        order by id desc limit 1
        into _last_paid_payment;
        select * from community_service.users
        where id = _subscription.user_id
        into _user;
        -- track totals for the result payload
        _total_affected := _total_affected + 1;
        _affected_subscriptions_ids := array_append(_affected_subscriptions_ids, _subscription.id);
        _refined := _subscription.checkout_data;
        -- refresh customer name/email/document_number from the current user record
        _refined := jsonb_set(_refined, '{customer,name}', to_jsonb((_user.data->>'name')::text));
        _refined := jsonb_set(_refined, '{customer,email}', to_jsonb((_user.data->>'email')::text));
        _refined := jsonb_set(_refined, '{customer,document_number}', to_jsonb((_user.data->>'document_number')::text));
        -- credit-card subscriptions charge against the stored gateway card id,
        -- never a stale card_hash
        if (_refined ->> 'payment_method')::text = 'credit_card' then
            -- replace card_id with the gateway-side id of the stored credit card
            select gateway_data->>'id'::text from payment_service.credit_cards
            where id = _subscription.credit_card_id
            into _card_id;
            _refined := jsonb_set(_refined, '{card_id}'::text[], to_jsonb(_card_id));
            _refined := _refined - 'card_hash';
        end if;
        -- create the new pending payment, reusing the gateway of the last paid one
        insert into payment_service.catalog_payments(gateway, platform_id, project_id, user_id, subscription_id, data)
        values (_last_paid_payment.gateway, _subscription.platform_id, _subscription.project_id, _subscription.user_id, _subscription.id, _refined)
        returning * into _new_payment;
        -- hand the payment off to the asynchronous processor
        perform pg_notify('process_payments_channel',
            json_build_object('id', _new_payment.id, 'subscription_id', _subscription.id)::text);
    end loop;
    _result := json_build_object(
        'total_affected', _total_affected,
        'affected_ids', _affected_subscriptions_ids
    );
    return _result;
end;
$_$;
CREATE FUNCTION payment_service.transition_to(payment payment_service.catalog_payments, status payment_service.payment_status, reason json) RETURNS boolean
    LANGUAGE plpgsql
    AS $_$
begin
    -- Move a payment to a new status, recording the transition; returns false
    -- (and records nothing) when the payment is already in that status.
    if $1.status = $2 then
        return false;
    end if;
    -- audit trail: one row per state change, with the caller-supplied reason
    insert into payment_service.payment_status_transitions
        (catalog_payment_id, from_status, to_status, data)
    values
        ($1.id, $1.status, $2, ($3)::jsonb);
    -- apply the new status to the payment itself
    update payment_service.catalog_payments
    set status = $2
    where id = $1.id;
    return true;
end;
$_$;
COMMENT ON FUNCTION payment_service.transition_to(payment payment_service.catalog_payments, status payment_service.payment_status, reason json) IS 'payment state machine';
CREATE FUNCTION payment_service.transition_to(subscription payment_service.subscriptions, status payment_service.subscription_status, reason json) RETURNS boolean
    LANGUAGE plpgsql
    AS $_$
begin
    -- Move a subscription to a new status, recording the transition; returns
    -- false (and records nothing) when it is already in that status.
    if $1.status = $2 then
        return false;
    end if;
    -- audit trail: one row per state change, with the caller-supplied reason
    insert into payment_service.subscription_status_transitions
        (subscription_id, from_status, to_status, data)
    values
        ($1.id, $1.status, $2, ($3)::jsonb);
    -- apply the new status to the subscription itself
    update payment_service.subscriptions
    set status = $2
    where id = $1.id;
    return true;
end;
$_$;
COMMENT ON FUNCTION payment_service.transition_to(subscription payment_service.subscriptions, status payment_service.subscription_status, reason json) IS 'subscription state machine';
CREATE FUNCTION payment_service_api.pay(data json) RETURNS json
    LANGUAGE plpgsql
    AS $_$
declare
    _result json;
    _payment payment_service.catalog_payments;
    _user_id uuid;
    _user community_service.users;
    _version payment_service.catalog_payment_versions;
    _credit_card payment_service.credit_cards;
    _subscription payment_service.subscriptions;
    _reward project_service.rewards;
    _refined jsonb;
    _external_id text;
begin
    -- Entry point for checkout. Validates the caller/project/reward, enriches
    -- the request with user data, inserts the payment (plus first version and,
    -- when requested, a subscription) and notifies the async processor.
    -- Returns {id, subscription_id, old_version_id}.
    -- ensure that roles come from any permitted
    perform core.force_any_of_roles('{platform_user, scoped_user}');
    -- platform users pay on behalf of a user; scoped users pay for themselves
    if current_role = 'platform_user' then
        _user_id := ($1 ->> 'user_id')::uuid;
        -- external_id is free-form platform text; keep as text (was wrongly cast to uuid)
        _external_id := ($1 ->> 'external_id')::text;
    else
        _user_id := core.current_user_id();
    end if;
    -- check if project exists on platform
    if ($1->>'project_id')::uuid is null
        OR not core.project_exists_on_platform(($1->>'project_id')::uuid, core.current_platform_id()) then
        raise exception 'project not found on platform';
    end if;
    -- set user into variable
    select *
    from community_service.users
    where id = _user_id
    and platform_id = core.current_platform_id()
    into _user;
    -- check if user exists on current platform
    if _user.id is null then
        raise exception 'missing user';
    end if;
    -- get and check if reward exists (and belongs to the project being paid)
    if ($1->>'reward_id')::uuid is not null then
        select * from project_service.rewards
        where project_id = ($1->>'project_id')::uuid
        -- fixed: was comparing the reward id against project_id (copy-paste bug)
        and id = ($1->>'reward_id')::uuid
        into _reward;
        if _reward.id is null then
            raise 'reward not found';
        end if;
        -- fixed: cast applies to the extracted amount, not the literal 'amount'
        if (($1->>'amount')::decimal) < (_reward.data->>'minimum_value')::decimal then
            raise 'payment amount is below of reward minimum %', (_reward.data->>'minimum_value')::decimal;
        end if;
    end if;
    -- fill ip address to received params
    _refined := jsonb_set(($1)::jsonb, '{current_ip}'::text[], to_jsonb(core.force_ip_address()::text));
    -- when the user already has name/document_number, they win over the request;
    -- otherwise the request values are persisted back onto the user record
    if not core_validator.is_empty((_user.data->>'name')::text) then
        _refined := jsonb_set(_refined, '{customer,name}', to_jsonb(_user.data->>'name'::text));
    else
        update community_service.users
        set name = ($1->'customer'->>'name')::text
        where id = _user.id;
    end if;
    -- NOTE(review): unlike name/document_number, a missing user email is not
    -- back-filled from the request — confirm this asymmetry is intended
    if not core_validator.is_empty((_user.data->>'email')::text) then
        _refined := jsonb_set(_refined, '{customer,email}', to_jsonb(_user.data->>'email'::text));
    end if;
    if not core_validator.is_empty((_user.data->>'document_number')::text) then
        _refined := jsonb_set(_refined, '{customer,document_number}', to_jsonb(_user.data->>'document_number'::text));
    else
        -- fixed: removed stray `select * from community_service.users;` (plpgsql
        -- SELECT without INTO raises at runtime) and wrapped the new value in
        -- to_jsonb (jsonb_set requires a jsonb third argument, not text)
        update community_service.users
        set data = jsonb_set(data, '{document_number}'::text[], to_jsonb(($1->'customer'->>'document_number')::text))
        where id = _user.id;
    end if;
    -- fill with anonymous (defaults to false)
    _refined := jsonb_set(_refined, '{anonymous}'::text[], to_jsonb(coalesce(($1->>'anonymous')::boolean, false)));
    -- generate a base structure to payment json
    _refined := (payment_service._serialize_payment_basic_data((_refined)::json))::jsonb;
    -- if payment_method is credit_card should check for card_hash or card_id
    if _refined->>'payment_method'::text = 'credit_card' then
        -- fill with credit_card_owner_document
        _refined := jsonb_set(_refined, '{credit_card_owner_document}'::text[], to_jsonb(coalesce(($1->>'credit_card_owner_document')::text, '')));
        -- fill with is_international
        _refined := jsonb_set(_refined, '{is_international}'::text[], to_jsonb(coalesce(($1->>'is_international')::boolean, false)));
        -- fill with save_card
        _refined := jsonb_set(_refined, '{save_card}'::text[], to_jsonb(coalesce(($1->>'save_card')::boolean, false)));
        -- check if card_hash or card_id is present
        if core_validator.is_empty((($1)->>'card_hash')::text)
            and core_validator.is_empty((($1)->>'card_id')::text) then
            raise 'missing card_hash or card_id';
        end if;
        -- if has card_id check if user is card owner
        if not core_validator.is_empty((($1)->>'card_id')::text) then
            select cc.* from payment_service.credit_cards cc
            where cc.user_id = _user_id and cc.id = (($1)->>'card_id')::uuid
            into _credit_card;
            if _credit_card.id is null then
                raise 'invalid card_id';
            end if;
            _refined := jsonb_set(_refined, '{card_id}'::text[], to_jsonb(_credit_card.id::text));
        elsif not core_validator.is_empty((($1)->>'card_hash')::text) then
            _refined := jsonb_set(_refined, '{card_hash}'::text[], to_jsonb($1->>'card_hash'::text));
        end if;
    end if;
    -- insert payment in table (fixed: column name was misspelled `external_i`)
    insert into payment_service.catalog_payments (
        external_id, platform_id, project_id, user_id, reward_id, data, gateway
    ) values (
        _external_id,
        core.current_platform_id(),
        ($1->>'project_id')::uuid,
        _user_id,
        _reward.id,
        _refined,
        coalesce(($1->>'gateway')::text, 'pagarme')
    ) returning * into _payment;
    -- insert first payment version
    insert into payment_service.catalog_payment_versions (
        catalog_payment_id, data
    ) values ( _payment.id, _payment.data )
    returning * into _version;
    -- check if payment is a subscription to create one
    if ($1->>'subscription') is not null and ($1->>'subscription')::boolean then
        insert into payment_service.subscriptions (
            platform_id, project_id, user_id, checkout_data
        ) values (_payment.platform_id, _payment.project_id, _payment.user_id, payment_service._serialize_subscription_basic_data(_payment.data::json)::jsonb)
        returning * into _subscription;
        update payment_service.catalog_payments
        set subscription_id = _subscription.id
        where id = _payment.id;
    end if;
    -- build result json (subscription_id is null for one-off payments)
    select json_build_object(
        'id', _payment.id,
        'subscription_id', _subscription.id,
        'old_version_id', _version.id
    ) into _result;
    -- notify to backend processor via listen
    PERFORM pg_notify('process_payments_channel',
        json_build_object(
            'id', _payment.id,
            'subscription_id', _subscription.id,
            'created_at', _payment.created_at::timestamp
        )::text
    );
    return _result;
end;
$_$;
CREATE FUNCTION platform_service.user_in_platform(user_id uuid, platform_id uuid) RETURNS boolean
    LANGUAGE sql STABLE
    AS $_$
    -- Membership test: true when a platform_users row links the user to the platform.
    select exists(
        select true
        from platform_service.platform_users pu
        where pu.user_id = $1
          and pu.platform_id = $2
    );
$_$;
COMMENT ON FUNCTION platform_service.user_in_platform(user_id uuid, platform_id uuid) IS 'Check if inputed user has access on inputed platform';
CREATE FUNCTION platform_service_api.create_platform(name text) RETURNS platform_service.platforms
    LANGUAGE plpgsql
    AS $_$
declare
    _platform platform_service.platforms;
begin
    -- Create the platform record itself.
    insert into platform_service.platforms (name)
    values ($1)
    returning * into _platform;
    -- Enroll the calling user as the platform's first member.
    insert into platform_service.platform_users (user_id, platform_id)
    values (core.current_user_id(), _platform.id);
    return _platform;
end;
$_$;
COMMENT ON FUNCTION platform_service_api.create_platform(name text) IS 'Create a new platform on current logged platform user';
-- Active (non-disabled) API keys, restricted to platforms where the current
-- user is a member with owner/admin rights.
CREATE VIEW platform_service_api.api_keys AS
 SELECT pak.id,
    pak.platform_id,
    pak.token,
    pak.created_at,
    pak.disabled_at
   FROM (platform_service.platform_api_keys pak
     JOIN platform_service.platform_users pu ON ((pu.platform_id = pak.platform_id)))
  WHERE (core.is_owner_or_admin(pu.user_id) AND (pak.disabled_at IS NULL));
COMMENT ON VIEW platform_service_api.api_keys IS 'List all api keys from platform that user have access';
CREATE FUNCTION platform_service_api.generate_api_key(platform_id uuid) RETURNS platform_service_api.api_keys
    LANGUAGE plpgsql
    AS $_$
declare
    _token uuid;
    _api_key platform_service.platform_api_keys;
begin
    -- Mint a new platform_user JWT for the platform; only members may do so.
    if not platform_service.user_in_platform(core.current_user_id(), $1) then
        raise exception 'insufficient permissions to do this action';
    end if;
    -- the platform's secret token is embedded in the JWT claims
    select p.token
    into _token
    from platform_service.platforms p
    where p.id = $1;
    insert into platform_service.platform_api_keys (platform_id, token)
    values (
        $1,
        core.gen_jwt_token(json_build_object(
            'role', 'platform_user',
            'platform_token', _token,
            'gen_at', extract(epoch from now())::integer
        ))
    )
    returning * into _api_key;
    return _api_key;
end;
$_$;
COMMENT ON FUNCTION platform_service_api.generate_api_key(platform_id uuid) IS 'Generate a new API_KEY for given platform';
CREATE FUNCTION platform_service_api.login(email text, password text) RETURNS core.jwt_token
    LANGUAGE plpgsql
    AS $_$
declare
    _account platform_service.users;
    _token core.jwt_token;
begin
    -- Authenticate by case-insensitive email plus bcrypt-checked password.
    select u.*
    into _account
    from platform_service.users u
    where lower(u.email) = lower($1)
      and u.password = crypt($2, u.password);
    if _account is null then
        raise invalid_password using message = 'invalid user or password';
    end if;
    -- Issue a platform_user JWT that expires in two hours.
    select core.gen_jwt_token(row_to_json(r)) as token
    into _token
    from (
        select
            'platform_user' as role,
            _account.id as user_id,
            extract(epoch from now())::integer + (60*60)*2 as exp
    ) r;
    return _token;
end;
$_$;
COMMENT ON FUNCTION platform_service_api.login(email text, password text) IS 'Handles with platform users authentication';
CREATE FUNCTION platform_service_api.sign_up(name text, email text, password text) RETURNS core.jwt_token
    LANGUAGE plpgsql
    AS $$
declare
    _account platform_service.users;
    _token core.jwt_token;
begin
    -- Create the account; only a bcrypt hash of the password is stored.
    insert into platform_service.users(name, email, password)
    values ($1, $2, crypt($3, gen_salt('bf')))
    returning * into _account;
    -- Issue a platform_user JWT that expires in two hours.
    select core.gen_jwt_token(row_to_json(r)) as token
    into _token
    from (
        select
            'platform_user' as role,
            _account.id as user_id,
            extract(epoch from now())::integer + (60*60)*2 as exp
    ) r;
    return _token;
end;
$$;
COMMENT ON FUNCTION platform_service_api.sign_up(name text, email text, password text) IS 'Handles with creation of new platform users';
CREATE FUNCTION project_service._serialize_project_basic_data(json) RETURNS json
    LANGUAGE plpgsql IMMUTABLE
    AS $_$
declare
    _result json;
begin
    -- Normalize raw project input into the canonical project-data shape.
    -- name, permalink and mode are mandatory (raise_when_empty raises when
    -- missing/blank); all other fields pass through and may be null.
    select json_build_object(
        'current_ip', ($1->>'current_ip')::text,
        'name', core_validator.raise_when_empty(($1->>'name')::text, 'name'),
        'status', ($1->>'status'::text),
        'permalink', core_validator.raise_when_empty(($1->>'permalink')::text, 'permalink'),
        -- the cast to project_mode validates the value against the enum
        'mode', core_validator.raise_when_empty((($1->>'mode')::project_service.project_mode)::text, 'mode'),
        'about_html', ($1->>'about_html')::text,
        'budget_html', ($1->>'budget_html')::text,
        'online_days', ($1->>'online_days')::integer,
        'cover_image_versions', ($1->>'cover_image_versions')::json,
        'card_info', json_build_object(
            'image_url', ($1->'card_info'->>'image_url')::text,
            'title', ($1->'card_info'->>'title')::text,
            'description', ($1->'card_info'->>'description')::text
        ),
        'video_info', json_build_object(
            'id', ($1->'video'->>'id')::text,
            'provider', ($1->'video'->>'provider')::text,
            'embed_url', ($1->'video'->>'embed_url')::text,
            -- NOTE(review): thumb_url is sourced from the input key 'cover_url'
            -- — confirm the key rename is intentional
            'thumb_url', ($1->'video'->>'cover_url')::text
        ),
        'address', json_build_object(
            'state', ($1->'address'->>'state')::text,
            'city', ($1->'address'->>'city')::text
        ),
        'metadata', ($1->>'metadata')::json
    ) into _result;
    return _result;
end;
$_$;
CREATE FUNCTION project_service._serialize_project_basic_data(json, with_default json) RETURNS json
    LANGUAGE plpgsql IMMUTABLE
    AS $_$
declare
    _result json;
begin
    -- Update-path variant of the project serializer: each field falls back to
    -- the existing project data ($2) when absent from the incoming json ($1).
    select json_build_object(
        'current_ip', ($1->>'current_ip'),
        'name', core_validator.raise_when_empty(coalesce(($1->>'name')::text, ($2->>'name')::text), 'name'),
        'status', coalesce(($1->>'status'::text), ($2->>'status'::text)),
        'permalink', core_validator.raise_when_empty(coalesce(($1->>'permalink')::text, ($2->>'permalink')::text), 'permalink'),
        'mode', core_validator.raise_when_empty(coalesce((($1->>'mode')::project_service.project_mode)::text,(($2->>'mode')::project_service.project_mode)::text), 'mode'),
        'about_html', coalesce(($1->>'about_html')::text, ($2->>'about_html')::text),
        'budget_html', coalesce(($1->>'budget_html')::text, ($2->>'budget_html')::text),
        'online_days', coalesce(($1->>'online_days')::integer, ($2->>'online_days')::integer),
        'cover_image_versions', coalesce(($1->>'cover_image_versions'), ($2->>'cover_image_versions'))::json,
        'card_info', json_build_object(
            'image_url', coalesce(($1->'card_info'->>'image_url'), ($2->'card_info'->>'image_url'))::text,
            'title', coalesce(($1->'card_info'->>'title'), ($2->'card_info'->>'title'))::text,
            'description', coalesce(($1->'card_info'->>'description'), ($2->'card_info'->>'description'))::text
        ),
        'video_info', json_build_object(
            'id', coalesce(($1->'video'->>'id'), ($2->'video'->>'id'))::text,
            'provider', coalesce(($1->'video'->>'provider'), ($2->'video'->>'provider'))::text,
            'embed_url', coalesce(($1->'video'->>'embed_url'), ($2->'video'->>'embed_url'))::text,
            'thumb_url', coalesce(($1->'video'->>'cover_url'), ($2->'video'->>'cover_url'))::text
        ),
        'address', json_build_object(
            'state', coalesce(($1->'address'->>'state'), ($2->'address'->>'state'))::text,
            'city', coalesce(($1->'address'->>'city'), ($2->'address'->>'city'))::text
        ),
        -- NOTE(review): metadata does NOT fall back to $2, unlike every other
        -- field here (and unlike the reward serializer) — an update without
        -- metadata clears it; confirm this is intended
        'metadata', ($1->>'metadata')::json
    ) into _result;
    return _result;
end;
$_$;
CREATE FUNCTION project_service._serialize_reward_basic_data(json) RETURNS json
    LANGUAGE plpgsql IMMUTABLE
    AS $_$
declare
    _result json;
begin
    -- Normalize raw reward input into the canonical reward-data shape.
    -- minimum_value, maximum_contributions and shipping_options are mandatory
    -- (cast first to validate the type/enum, then checked non-empty).
    select json_build_object(
        'current_ip', ($1->>'current_ip')::text,
        'minimum_value', core_validator.raise_when_empty((($1->>'minimum_value')::decimal)::text, 'minimum_value')::decimal,
        'maximum_contributions', core_validator.raise_when_empty((($1->>'maximum_contributions')::integer)::text, 'maximum_contributions')::integer,
        'shipping_options', core_validator.raise_when_empty((($1->>'shipping_options')::project_service.shipping_options_enum)::text, 'shipping_options')::project_service.shipping_options_enum,
        'deliver_at', ($1->>'deliver_at')::date,
        'row_order', ($1->>'row_order')::integer,
        'title', ($1->>'title')::text,
        'description', ($1->>'description')::text,
        'metadata', ($1->>'metadata')::json
    ) into _result;
    return _result;
end;
$_$;
CREATE FUNCTION project_service._serialize_reward_basic_data(json, with_default json) RETURNS json
    LANGUAGE plpgsql IMMUTABLE
    AS $_$
declare
    _result json;
begin
    -- Update-path variant of the reward serializer: every field falls back to
    -- the existing reward data ($2) when absent from the incoming json ($1).
    select json_build_object(
        'current_ip', coalesce(($1->>'current_ip')::text, ($2->>'current_ip')),
        'minimum_value', core_validator.raise_when_empty(
            coalesce((($1->>'minimum_value')::decimal)::text, ($2->>'minimum_value')::text), 'minimum_value')::decimal,
        'maximum_contributions', core_validator.raise_when_empty(
            coalesce((($1->>'maximum_contributions')::integer)::text, ($2->>'maximum_contributions')::text), 'maximum_contributions')::integer,
        'shipping_options', core_validator.raise_when_empty(
            coalesce((($1->>'shipping_options')::project_service.shipping_options_enum)::text, ($2->>'shipping_options')::text), 'shipping_options')::project_service.shipping_options_enum,
        'deliver_at', coalesce(($1->>'deliver_at')::date, ($2->>'deliver_at')::date),
        'row_order', coalesce(($1->>'row_order')::integer, ($2->>'row_order')::integer),
        'title', coalesce(($1->>'title')::text, ($2->>'title')::text),
        'description', coalesce(($1->>'description')::text, ($2->>'description')::text),
        'metadata', coalesce(($1->>'metadata')::json, ($2->>'metadata')::json)
    ) into _result;
    return _result;
end;
$_$;
CREATE FUNCTION project_service_api.project(data json) RETURNS json
    LANGUAGE plpgsql
    AS $_$
declare
    _platform platform_service.platforms;
    _user community_service.users;
    _result json;
    _permalink text;
    _refined jsonb;
    _project project_service.projects;
    _version project_service.project_versions;
    _is_creating boolean default true;
    _external_id text;
begin
    -- Create-or-update entry point for projects. When $1 carries an 'id' the
    -- existing project is updated (a version snapshot is taken first);
    -- otherwise a new draft project is created. Returns id, old_version_id,
    -- permalink, mode, status and data.
    -- ensure that roles come from any permitted
    perform core.force_any_of_roles('{platform_user,scoped_user}');
    -- update path: resolve the existing project by id on the current platform
    if ($1->>'id')::uuid is not null then
        select * from project_service.projects
        where id = ($1->>'id')::uuid
        and platform_id = core.current_platform_id()
        into _project;
        -- check if user has permission to handle on project
        if _project.id is null then
            raise 'project not found';
        end if;
        if not core.is_owner_or_admin(_project.user_id) then
            raise insufficient_privilege;
        end if;
        _is_creating := false;
    end if;
    -- resolve the acting user: platform users may act for the project owner
    -- (or an explicit user_id on create); scoped users act for themselves
    select * from community_service.users cu
    where cu.id = (case when current_role = 'platform_user' then
        coalesce(_project.user_id, ($1->>'user_id')::uuid)
        else core.current_user_id() end)
    and cu.platform_id = core.current_platform_id()
    into _user;
    if _user.id is null or not core.is_owner_or_admin(_user.id) then
        raise exception 'invalid user';
    end if;
    -- derive the permalink: slugify the explicit permalink, or fall back to
    -- a slug of the project name
    if core_validator.is_empty($1->>'permalink'::text) then
        _permalink := unaccent(replace(lower($1->>'name'),' ','_'));
    else
        _permalink := unaccent(replace(lower($1->>'permalink'),' ','_'));
    end if;
    -- every project starts (or is serialized) as draft in the refined json
    select jsonb_set($1::jsonb, '{status}'::text[], to_jsonb('draft'::text))
    into _refined;
    -- put generated permalink into refined json
    select jsonb_set(_refined, '{permalink}'::text[], to_jsonb(_permalink::text))
    into _refined;
    -- put current request ip into refined json
    select jsonb_set(_refined, '{current_ip}'::text[], to_jsonb(core.request_ip_address()))
    into _refined;
    -- mode may only be overridden while the (existing) project is a draft;
    -- on create _project.status is NULL so this branch is skipped
    if not core_validator.is_empty($1->>'mode'::text) and _project.status = 'draft' then
        _refined := jsonb_set(_refined, '{mode}'::text[], to_jsonb($1->>'mode'::text));
    end if;
    if _is_creating then
        -- redefined refined json with project basic serializer
        select project_service._serialize_project_basic_data(_refined::json)::jsonb
        into _refined;
        -- only platform users may attach their own external identifier
        if current_role = 'platform_user' then
            _external_id := ($1->>'external_id')::text;
        end if;
        -- insert project
        insert into project_service.projects (
            external_id, platform_id, user_id, permalink, name, mode, data
        ) values (_external_id, core.current_platform_id(), _user.id, _permalink, ($1 ->> 'name')::text, ($1 ->> 'mode')::project_service.project_mode, _refined)
        returning * into _project;
        -- insert first version of project
        insert into project_service.project_versions (
            project_id, data
        ) values (_project.id, row_to_json(_project)::jsonb)
        returning * into _version;
    else
        -- merge incoming data over the stored data (defaults from _project.data)
        _refined := project_service._serialize_project_basic_data(_refined::json, _project.data::json)::jsonb;
        -- snapshot the pre-update state as a new version
        insert into project_service.project_versions(project_id, data)
        values (_project.id, row_to_json(_project)::jsonb)
        returning * into _version;
        -- update project with new generated data
        update project_service.projects
        set mode = (_refined ->> 'mode')::project_service.project_mode,
            name = (_refined ->> 'name')::text,
            permalink = (_refined ->> 'permalink')::text,
            data = _refined
        where id = _project.id
        returning * into _project;
    end if;
    select json_build_object(
        'id', _project.id,
        'old_version_id', _version.id,
        'permalink', _project.permalink,
        'mode', _project.mode,
        'status', _project.status,
        'data', _project.data
    ) into _result;
    return _result;
end;
$_$;
CREATE FUNCTION project_service_api.reward(data json) RETURNS json
    LANGUAGE plpgsql
    AS $_$
declare
    _is_creating boolean default false;
    _result json;
    _reward project_service.rewards;
    _project project_service.projects;
    _version project_service.reward_versions;
    _refined jsonb;
    _created_at timestamp default now();
    _external_id text;
begin
    -- Create-or-update entry point for rewards. When $1 carries an 'id' the
    -- existing reward is updated (a version snapshot is taken first);
    -- otherwise a new reward is created under $1->>'project_id'.
    -- Returns id, old_version_id and data.
    -- ensure that roles come from any permitted
    perform core.force_any_of_roles('{platform_user, scoped_user}');
    -- update path: resolve the reward and its project
    if ($1->>'id') is not null then
        -- NOTE(review): unlike the create path below, this lookup has no
        -- platform_id filter — it relies solely on the ownership check further
        -- down; confirm cross-platform access is impossible here
        select * from project_service.rewards
        where id = ($1->>'id')::uuid
        into _reward;
        -- get project
        select * from project_service.projects
        where id = _reward.project_id
        into _project;
        if _reward.id is null or _project.id is null then
            raise 'resource not found';
        end if;
    else
        _is_creating := true;
        -- create path: the project must exist on the current platform
        select * from project_service.projects
        where id = ($1->>'project_id')::uuid
        and platform_id = core.current_platform_id()
        into _project;
        -- check if project exists
        if _project.id is null then
            raise 'project not found';
        end if;
    end if;
    -- only the project owner (or an admin) may manage its rewards
    if not core.is_owner_or_admin(_project.user_id) then
        raise exception insufficient_privilege;
    end if;
    -- stamp the requester's ip into the refined payload
    _refined := jsonb_set(($1)::jsonb, '{current_ip}'::text[], to_jsonb(core.force_ip_address()::text));
    -- check if is creating or updating
    if _is_creating then
        -- defaults: free shipping, unlimited (0) contributions
        _refined := jsonb_set(_refined, '{shipping_options}'::text[], to_jsonb(
            coalesce(($1->>'shipping_options')::project_service.shipping_options_enum, 'free')::text
        ));
        _refined := jsonb_set(_refined, '{maximum_contributions}'::text[], to_jsonb(
            coalesce(($1->>'maximum_contributions')::integer, 0)::text
        ));
        _refined := project_service._serialize_reward_basic_data(_refined::json)::jsonb;
        -- platform users may backfill external_id and original creation time
        if current_role = 'platform_user' then
            _external_id := ($1->>'external_id')::text;
            _created_at := ($1->>'created_at')::timestamp;
        end if;
        -- insert new reward and version
        insert into project_service.rewards (platform_id, external_id, project_id, data, created_at)
        values (_project.platform_id, _external_id, _project.id, _refined, _created_at)
        returning * into _reward;
        insert into project_service.reward_versions(reward_id, data)
        values (_reward.id, row_to_json(_reward.*)::jsonb)
        returning * into _version;
    else
        -- merge incoming data over the stored data (defaults from _reward.data)
        _refined := project_service._serialize_reward_basic_data(_refined::json, _reward.data::json)::jsonb;
        -- snapshot the pre-update state, then apply the new data
        insert into project_service.reward_versions(reward_id, data)
        values (_reward.id, row_to_json(_reward.*)::jsonb)
        returning * into _version;
        update project_service.rewards
        set data = _refined
        where id = _reward.id
        returning * into _reward;
    end if;
    select json_build_object(
        'id', _reward.id,
        'old_version_id', _version.id,
        'data', _reward.data
    ) into _result;
    return _result;
end;
$_$;
CREATE VIEW analytics_service_api.users_count AS
 SELECT count(*) AS users
   FROM community_service.users u
  WHERE (u.platform_id = core.current_platform_id());
COMMENT ON VIEW analytics_service_api.users_count IS 'Shows the number of users on actual platform';
-- Per-platform user listing with the jsonb `data` column flattened into
-- typed top-level fields for API consumers.
CREATE VIEW community_service_api.users AS
 SELECT u.external_id,
    u.id,
    (u.data ->> 'name'::text) AS name,
    (u.data ->> 'public_name'::text) AS public_name,
    (u.data ->> 'document_number'::text) AS document_number,
    (u.data ->> 'document_type'::text) AS document_type,
    (u.data ->> 'legal_account_type'::text) AS legal_account_type,
    u.email,
    ((u.data ->> 'address'::text))::jsonb AS address,
    ((u.data ->> 'metadata'::text))::jsonb AS metadata,
    ((u.data ->> 'bank_account'::text))::jsonb AS bank_account,
    u.created_at,
    u.updated_at
   FROM community_service.users u
  WHERE (u.platform_id = core.current_platform_id());
-- Payment listing for the current platform, visible to the payer and to the
-- project owner; billing and card/boleto details are exposed only to the
-- payer (or an admin).
CREATE VIEW payment_service_api.payments AS
 SELECT cp.id,
    cp.subscription_id,
    ((cp.data ->> 'amount'::text))::numeric AS amount,
    cp.project_id,
    cp.status,
    payment_service.paid_transition_at(cp.*) AS paid_at,
    cp.created_at,
    p.status AS project_status,
    p.mode AS project_mode,
    (cp.data ->> 'payment_method'::text) AS payment_method,
        CASE
            WHEN core.is_owner_or_admin(cp.user_id) THEN ((cp.data ->> 'customer'::text))::json
            ELSE NULL::json
        END AS billing_data,
        CASE
            WHEN (core.is_owner_or_admin(cp.user_id) AND ((cp.data ->> 'payment_method'::text) = 'credit_card'::text)) THEN json_build_object('first_digits', (cp.gateway_general_data ->> 'card_first_digits'::text), 'last_digits', (cp.gateway_general_data ->> 'card_last_digits'::text), 'brand', (cp.gateway_general_data ->> 'card_brand'::text), 'country', (cp.gateway_general_data ->> 'card_country'::text))
            WHEN (core.is_owner_or_admin(cp.user_id) AND ((cp.data ->> 'payment_method'::text) = 'boleto'::text)) THEN json_build_object('barcode', (cp.gateway_general_data ->> 'boleto_barcode'::text), 'url', (cp.gateway_general_data ->> 'boleto_url'::text), 'expiration_date', ((cp.gateway_general_data ->> 'boleto_expiration_date'::text))::timestamp without time zone)
            ELSE NULL::json
        END AS payment_method_details
   FROM ((payment_service.catalog_payments cp
     JOIN project_service.projects p ON ((p.id = cp.project_id)))
     JOIN community_service.users u ON ((u.id = cp.user_id)))
  WHERE ((cp.platform_id = core.current_platform_id()) AND (core.is_owner_or_admin(cp.user_id) OR core.is_owner_or_admin(p.user_id)))
  ORDER BY cp.id DESC;
-- Subscription listing for the current platform, visible to the subscriber
-- and the project owner. Payment stats, the credit card reference and the
-- checkout_data (stripped of card secrets, with customer fields refreshed
-- from the user record) are exposed only to the subscriber (or an admin).
CREATE VIEW payment_service_api.subscriptions AS
 SELECT s.id,
    s.project_id,
        CASE
            WHEN core.is_owner_or_admin(s.user_id) THEN s.credit_card_id
            ELSE NULL::uuid
        END AS credit_card_id,
        CASE
            WHEN core.is_owner_or_admin(s.user_id) THEN stats.paid_count
            ELSE NULL::bigint
        END AS paid_count,
        CASE
            -- NOTE(review): total_paid below sums amounts over ALL payments of
            -- the subscription, not only those with status 'paid' — confirm
            WHEN core.is_owner_or_admin(s.user_id) THEN stats.total_paid
            ELSE (NULL::bigint)::numeric
        END AS total_paid,
    s.status,
    payment_service.paid_transition_at(last_paid_payment.*) AS paid_at,
    -- next charge is hard-coded to one month after the last paid transition
    (payment_service.paid_transition_at(last_paid_payment.*) + '1 mon'::interval) AS next_charge_at,
    ((((s.checkout_data - 'card_id'::text) - 'card_hash'::text) - 'current_ip'::text) || jsonb_build_object('customer', (((s.checkout_data ->> 'customer'::text))::jsonb || jsonb_build_object('name', (u.data ->> 'name'::text), 'email', (u.data ->> 'email'::text), 'document_number', (u.data ->> 'document_number'::text))))) AS checkout_data,
    s.created_at
   FROM (((((payment_service.subscriptions s
     JOIN project_service.projects p ON ((p.id = s.project_id)))
     JOIN community_service.users u ON ((u.id = s.user_id)))
     LEFT JOIN LATERAL ( SELECT sum(((cp.data ->> 'amount'::text))::numeric) AS total_paid,
            count(1) FILTER (WHERE (cp.status = 'paid'::payment_service.payment_status)) AS paid_count,
            count(1) FILTER (WHERE (cp.status = 'refused'::payment_service.payment_status)) AS refused_count
           FROM payment_service.catalog_payments cp
          WHERE (cp.subscription_id = s.id)) stats ON (true))
     LEFT JOIN LATERAL ( SELECT *
           FROM payment_service.catalog_payments cp
          WHERE ((cp.subscription_id = s.id) AND (cp.status = 'paid'::payment_service.payment_status))
          ORDER BY cp.id DESC
         LIMIT 1) last_paid_payment ON (true))
     -- NOTE(review): last_payment is joined but never referenced in the
     -- select list — dead join, candidate for removal
     LEFT JOIN LATERAL ( SELECT *
           FROM payment_service.catalog_payments cp
          WHERE (cp.subscription_id = s.id)
          ORDER BY cp.id DESC
         LIMIT 1) last_payment ON (true))
  WHERE ((s.platform_id = core.current_platform_id()) AND (core.is_owner_or_admin(s.user_id) OR core.is_owner_or_admin(p.user_id)));
CREATE VIEW project_service_api.projects AS
 SELECT proj.id,
    proj.external_id,
    proj.user_id,
    proj.permalink,
    proj.mode,
    proj.name
   FROM project_service.projects proj
  WHERE ((proj.platform_id = core.current_platform_id()) AND core.has_any_of_roles('{platform_user}'::text[]));
CREATE VIEW project_service_api.rewards AS
 SELECT rw.id,
    rw.external_id,
    rw.project_id,
    rw.data,
    ((rw.data ->> 'metadata'::text))::jsonb AS metadata,
    rw.created_at,
    rw.updated_at
   FROM project_service.rewards rw
  WHERE ((rw.platform_id = core.current_platform_id()) AND core.has_any_of_roles('{platform_user}'::text[]))
  ORDER BY rw.id DESC;
-- Keep updated_at columns current on every UPDATE via diesel's helper trigger.
CREATE TRIGGER set_updated_at BEFORE UPDATE ON core.core_settings FOR EACH ROW EXECUTE PROCEDURE public.diesel_set_updated_at();
CREATE TRIGGER set_updated_at BEFORE UPDATE ON payment_service.credit_cards FOR EACH ROW EXECUTE PROCEDURE public.diesel_set_updated_at();
CREATE TRIGGER set_updated_at BEFORE UPDATE ON payment_service.catalog_payments FOR EACH ROW EXECUTE PROCEDURE public.diesel_set_updated_at();
CREATE TRIGGER set_updated_at BEFORE UPDATE ON platform_service.platforms FOR EACH ROW EXECUTE PROCEDURE public.diesel_set_updated_at();
CREATE TRIGGER set_updated_at BEFORE UPDATE ON platform_service.users FOR EACH ROW EXECUTE PROCEDURE public.diesel_set_updated_at();
CREATE TRIGGER set_updated_at BEFORE UPDATE ON platform_service.platform_users FOR EACH ROW EXECUTE PROCEDURE public.diesel_set_updated_at();
CREATE TRIGGER set_updated_at BEFORE UPDATE ON project_service.project_versions FOR EACH ROW EXECUTE PROCEDURE public.diesel_set_updated_at();
-- Role grants.
-- USAGE on a schema is a prerequisite: object-level grants inside a schema
-- only take effect for roles that can also use the schema itself.
GRANT USAGE ON SCHEMA analytics_service TO scoped_user;
GRANT USAGE ON SCHEMA analytics_service TO platform_user;
GRANT USAGE ON SCHEMA analytics_service TO postgrest;
GRANT USAGE ON SCHEMA analytics_service TO admin;
GRANT USAGE ON SCHEMA analytics_service_api TO scoped_user;
GRANT USAGE ON SCHEMA analytics_service_api TO platform_user;
GRANT USAGE ON SCHEMA analytics_service_api TO postgrest;
GRANT USAGE ON SCHEMA analytics_service_api TO admin;
GRANT USAGE ON SCHEMA community_service TO platform_user;
GRANT USAGE ON SCHEMA community_service TO postgrest;
GRANT USAGE ON SCHEMA community_service TO anonymous;
GRANT USAGE ON SCHEMA community_service TO admin;
GRANT USAGE ON SCHEMA community_service TO scoped_user;
GRANT USAGE ON SCHEMA community_service_api TO platform_user;
GRANT USAGE ON SCHEMA community_service_api TO anonymous;
GRANT USAGE ON SCHEMA community_service_api TO postgrest;
GRANT USAGE ON SCHEMA community_service_api TO admin;
GRANT USAGE ON SCHEMA community_service_api TO scoped_user;
GRANT USAGE ON SCHEMA core TO scoped_user;
GRANT USAGE ON SCHEMA core TO platform_user;
GRANT USAGE ON SCHEMA core TO anonymous;
GRANT USAGE ON SCHEMA core_validator TO scoped_user;
GRANT USAGE ON SCHEMA core_validator TO platform_user;
GRANT USAGE ON SCHEMA core_validator TO postgrest;
GRANT USAGE ON SCHEMA core_validator TO admin;
GRANT USAGE ON SCHEMA payment_service TO scoped_user;
GRANT USAGE ON SCHEMA payment_service TO platform_user;
GRANT USAGE ON SCHEMA payment_service TO postgrest;
GRANT USAGE ON SCHEMA payment_service TO admin;
GRANT USAGE ON SCHEMA payment_service_api TO scoped_user;
GRANT USAGE ON SCHEMA payment_service_api TO platform_user;
GRANT USAGE ON SCHEMA payment_service_api TO postgrest;
GRANT USAGE ON SCHEMA payment_service_api TO admin;
GRANT USAGE ON SCHEMA platform_service TO scoped_user;
GRANT USAGE ON SCHEMA platform_service TO platform_user;
GRANT USAGE ON SCHEMA platform_service TO anonymous;
GRANT USAGE ON SCHEMA platform_service TO admin;
GRANT USAGE ON SCHEMA platform_service_api TO admin;
GRANT USAGE ON SCHEMA platform_service_api TO platform_user;
GRANT USAGE ON SCHEMA platform_service_api TO anonymous;
GRANT USAGE ON SCHEMA project_service TO scoped_user;
GRANT USAGE ON SCHEMA project_service TO platform_user;
GRANT USAGE ON SCHEMA project_service TO postgrest;
GRANT USAGE ON SCHEMA project_service TO admin;
GRANT USAGE ON SCHEMA project_service_api TO scoped_user;
GRANT USAGE ON SCHEMA project_service_api TO platform_user;
GRANT USAGE ON SCHEMA project_service_api TO postgrest;
GRANT USAGE ON SCHEMA project_service_api TO admin;
-- Object-level grants: EXECUTE on the *_api functions and
-- SELECT/INSERT/UPDATE on tables, per role. Note that no role gets DELETE
-- on any table here.
GRANT ALL ON FUNCTION community_service_api.create_scoped_user_session(id uuid) TO platform_user;
GRANT ALL ON FUNCTION community_service_api."user"(data json) TO scoped_user;
GRANT ALL ON FUNCTION community_service_api."user"(data json) TO platform_user;
GRANT SELECT,INSERT,UPDATE ON TABLE payment_service.catalog_payments TO scoped_user;
GRANT SELECT,INSERT,UPDATE ON TABLE payment_service.catalog_payments TO platform_user;
GRANT SELECT,INSERT,UPDATE ON TABLE payment_service.catalog_payments TO admin;
GRANT SELECT,INSERT,UPDATE ON TABLE payment_service.subscriptions TO scoped_user;
GRANT SELECT,INSERT,UPDATE ON TABLE payment_service.subscriptions TO platform_user;
GRANT SELECT,INSERT,UPDATE ON TABLE payment_service.subscriptions TO admin;
GRANT ALL ON FUNCTION payment_service_api.pay(data json) TO scoped_user;
GRANT ALL ON FUNCTION payment_service_api.pay(data json) TO platform_user;
GRANT SELECT,INSERT ON TABLE platform_service.platforms TO platform_user;
GRANT SELECT,INSERT ON TABLE platform_service.platforms TO admin;
GRANT SELECT ON TABLE platform_service.platforms TO scoped_user;
GRANT ALL ON FUNCTION platform_service_api.create_platform(name text) TO platform_user;
GRANT SELECT,INSERT ON TABLE platform_service.platform_api_keys TO platform_user;
GRANT SELECT,INSERT ON TABLE platform_service.platform_api_keys TO admin;
GRANT SELECT,INSERT ON TABLE platform_service.platform_users TO platform_user;
GRANT SELECT,INSERT ON TABLE platform_service.platform_users TO admin;
GRANT SELECT ON TABLE platform_service_api.api_keys TO platform_user;
GRANT SELECT ON TABLE platform_service_api.api_keys TO admin;
GRANT ALL ON FUNCTION platform_service_api.generate_api_key(platform_id uuid) TO admin;
GRANT ALL ON FUNCTION platform_service_api.generate_api_key(platform_id uuid) TO platform_user;
GRANT ALL ON FUNCTION platform_service_api.login(email text, password text) TO anonymous;
GRANT ALL ON FUNCTION platform_service_api.sign_up(name text, email text, password text) TO anonymous;
GRANT ALL ON FUNCTION project_service_api.project(data json) TO scoped_user;
GRANT ALL ON FUNCTION project_service_api.project(data json) TO platform_user;
GRANT ALL ON FUNCTION project_service_api.reward(data json) TO scoped_user;
GRANT ALL ON FUNCTION project_service_api.reward(data json) TO platform_user;
GRANT SELECT ON TABLE community_service.users TO postgrest;
GRANT SELECT ON TABLE community_service.users TO admin;
GRANT SELECT,UPDATE ON TABLE community_service.users TO scoped_user;
GRANT SELECT,INSERT,UPDATE ON TABLE community_service.users TO platform_user;
GRANT SELECT ON TABLE community_service.users TO anonymous;
GRANT SELECT ON TABLE analytics_service_api.users_count TO platform_user;
GRANT SELECT ON TABLE analytics_service_api.users_count TO admin;
GRANT SELECT ON TABLE analytics_service_api.users_count TO scoped_user;
GRANT SELECT,INSERT ON TABLE community_service.user_versions TO scoped_user;
GRANT SELECT,INSERT ON TABLE community_service.user_versions TO platform_user;
GRANT SELECT ON TABLE community_service_api.users TO platform_user;
GRANT SELECT ON TABLE core.core_settings TO platform_user;
GRANT SELECT ON TABLE core.core_settings TO anonymous;
GRANT SELECT ON TABLE core.core_settings TO scoped_user;
GRANT SELECT,INSERT ON TABLE payment_service.catalog_payment_versions TO scoped_user;
GRANT SELECT,INSERT ON TABLE payment_service.catalog_payment_versions TO platform_user;
GRANT SELECT ON TABLE payment_service.payment_status_transitions TO platform_user;
GRANT SELECT ON TABLE payment_service.payment_status_transitions TO scoped_user;
GRANT SELECT,INSERT,UPDATE ON TABLE project_service.projects TO platform_user;
GRANT SELECT ON TABLE project_service.projects TO anonymous;
GRANT SELECT,INSERT,UPDATE ON TABLE project_service.projects TO admin;
GRANT SELECT,INSERT,UPDATE ON TABLE project_service.projects TO scoped_user;
GRANT SELECT ON TABLE payment_service_api.payments TO platform_user;
GRANT SELECT ON TABLE payment_service_api.payments TO scoped_user;
GRANT SELECT ON TABLE payment_service_api.subscriptions TO platform_user;
GRANT SELECT ON TABLE payment_service_api.subscriptions TO scoped_user;
GRANT SELECT,INSERT ON TABLE platform_service.users TO platform_user;
GRANT SELECT,INSERT ON TABLE platform_service.users TO anonymous;
GRANT SELECT,INSERT ON TABLE platform_service.users TO admin;
GRANT SELECT,INSERT ON TABLE project_service.project_versions TO scoped_user;
GRANT SELECT,INSERT ON TABLE project_service.project_versions TO platform_user;
GRANT SELECT,INSERT ON TABLE project_service.project_versions TO admin;
GRANT SELECT,INSERT,UPDATE ON TABLE project_service.reward_versions TO scoped_user;
GRANT SELECT,INSERT,UPDATE ON TABLE project_service.reward_versions TO platform_user;
GRANT SELECT,INSERT,UPDATE ON TABLE project_service.rewards TO scoped_user;
GRANT SELECT,INSERT,UPDATE ON TABLE project_service.rewards TO platform_user;
GRANT SELECT ON TABLE project_service_api.projects TO platform_user;
-- Fixed: stray dataset artifact ("| the_stack |") was appended to this
-- statement, making it invalid SQL.
GRANT SELECT ON TABLE project_service_api.rewards TO platform_user;
-- SMS endpoint subtype: shares its primary key with the parent
-- SNSApplicationEndpointVO row and is removed together with it.
CREATE TABLE IF NOT EXISTS `zstack`.`SNSSmsEndpointVO`
(
`uuid` varchar(32) NOT NULL UNIQUE,
PRIMARY KEY (`uuid`),
CONSTRAINT fkSNSSmsEndpointVOSNSApplicationEndpointVO FOREIGN KEY (uuid) REFERENCES SNSApplicationEndpointVO (uuid) ON UPDATE RESTRICT ON DELETE CASCADE
) ENGINE = InnoDB
DEFAULT CHARSET = utf8;
-- One phone number subscribed to an SMS endpoint.
-- NOTE(review): createDate/lastOpDate carry no explicit NOT NULL/DEFAULT;
-- the effective defaults depend on MySQL's explicit_defaults_for_timestamp
-- setting — confirm against the deployment's configuration.
CREATE TABLE IF NOT EXISTS `zstack`.`SNSSmsReceiverVO`
(
`uuid` varchar(32) NOT NULL UNIQUE,
`phoneNumber` varchar(24) NOT NULL,
`endpointUuid` varchar(32) NOT NULL,
`type` varchar(24) NOT NULL,
`description` varchar(255) DEFAULT NULL,
`lastOpDate` timestamp ON UPDATE CURRENT_TIMESTAMP,
`createDate` timestamp,
PRIMARY KEY (`uuid`),
CONSTRAINT fkSNSSmsReceiverVOSNSSmsEndpointVO FOREIGN KEY (endpointUuid) REFERENCES SNSSmsEndpointVO (uuid)
) ENGINE = InnoDB
DEFAULT CHARSET = utf8;
-- Aliyun SMS text-template subtype of SNSTextTemplateVO (shared PK/FK):
-- provider-side template codes plus the registered signature ("sign").
CREATE TABLE IF NOT EXISTS `zstack`.`AliyunSmsSNSTextTemplateVO`
(
`uuid` varchar(32) NOT NULL UNIQUE,
`sign` varchar(24) NOT NULL,
`alarmTemplateCode` varchar(24) NOT NULL,
`eventTemplateCode` varchar(24) NOT NULL,
`eventTemplate` text,
PRIMARY KEY (`uuid`),
CONSTRAINT fkAliyunSmsSNSTextTemplateVOSNSTextTemplateVO FOREIGN KEY (uuid) REFERENCES SNSTextTemplateVO (uuid) ON UPDATE RESTRICT ON DELETE CASCADE
) ENGINE = InnoDB
DEFAULT CHARSET = utf8;
# Remove the unique constraint on name in HybridAccountVO; the duplicate
# second index was introduced by an import on 3.4.0.
ALTER TABLE HybridAccountVO
DROP INDEX name;
ALTER TABLE HybridAccountVO
DROP INDEX name_2;
-- Trash records for storage install paths that are pending cleanup.
-- NOTE(review): the UNIQUE KEY on trashId duplicates the primary key and is
-- a redundant index; the '0000-00-00 00:00:00' default requires the
-- NO_ZERO_DATE sql_mode to be disabled — both kept as-is for compatibility.
CREATE TABLE IF NOT EXISTS `InstallPathRecycleVO` (
`trashId` bigint(20) unsigned NOT NULL AUTO_INCREMENT,
`resourceUuid` varchar(32) NOT NULL,
`resourceType` varchar(32) NOT NULL,
`storageUuid` varchar(32) NOT NULL,
`storageType` varchar(32) NOT NULL,
`installPath` varchar(1024) NOT NULL,
`hostUuid` varchar(32) DEFAULT NULL,
`hypervisorType` varchar(32) DEFAULT NULL,
`trashType` varchar(32) NOT NULL,
`isFolder` boolean NOT NULL DEFAULT FALSE,
`size` bigint unsigned NOT NULL,
`createDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00',
PRIMARY KEY (`trashId`),
UNIQUE KEY `trashId` (`trashId`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- Maps a named VM priority level to scheduler/OOM tuning values
-- (cpuShares and oomScoreAdj suggest cgroup cpu.shares and the kernel
-- oom_score_adj — confirm against the consumer code).
CREATE TABLE `zstack`.`VmPriorityConfigVO` (
`uuid` VARCHAR(32) NOT NULL UNIQUE,
`level` VARCHAR(255) NOT NULL UNIQUE,
`cpuShares` int NOT NULL,
`oomScoreAdj` int NOT NULL,
`lastOpDate` timestamp ON UPDATE CURRENT_TIMESTAMP,
`createDate` timestamp,
PRIMARY KEY (`uuid`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- Widen and relax the legacy single-address column; its contents are
-- migrated into SNSEmailAddressVO by the procedure below and then nulled.
ALTER TABLE `SNSEmailEndpointVO` modify column email varchar(1024) DEFAULT NULL;
-- One email address per row, attached to an email endpoint.
CREATE TABLE IF NOT EXISTS `zstack`.`SNSEmailAddressVO` (
`uuid` varchar(32) NOT NULL UNIQUE,
`emailAddress` varchar(1024) NOT NULL,
`endpointUuid` varchar(32) NOT NULL,
`lastOpDate` timestamp ON UPDATE CURRENT_TIMESTAMP,
`createDate` timestamp,
PRIMARY KEY (`uuid`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- One-off migration: copy the legacy single `email` column of each
-- SNSEmailEndpointVO into a per-address SNSEmailAddressVO row (plus the
-- matching ResourceVO registration), then clear the legacy column.
DROP PROCEDURE IF EXISTS upgradeEmailAddressFromEndpoint;
DELIMITER $$
CREATE PROCEDURE upgradeEmailAddressFromEndpoint()
BEGIN
DECLARE email_address varchar(1024);
DECLARE endpoint_uuid varchar(32);
DECLARE email_address_count INT DEFAULT 0;
DECLARE email_address_uuid varchar(32);
DECLARE done INT DEFAULT FALSE;
DECLARE cur CURSOR FOR SELECT uuid, email FROM zstack.SNSEmailEndpointVO WHERE `email` IS NOT NULL;
DECLARE CONTINUE HANDLER FOR NOT FOUND SET done = TRUE;
OPEN cur;
read_loop: LOOP
FETCH cur INTO endpoint_uuid, email_address;
IF done THEN
LEAVE read_loop;
END IF;
-- idempotency check: skip addresses that were already migrated. The
-- COUNT(*) query always returns exactly one row, so the NOT FOUND
-- handler above is only ever tripped by the cursor FETCH.
SELECT count(*) INTO email_address_count FROM zstack.SNSEmailAddressVO WHERE emailAddress = email_address and endpointUuid = endpoint_uuid;
IF (email_address_count = 0) THEN
SET email_address_uuid = REPLACE(UUID(), '-', '');
-- register the new row in the global resource table first, then
-- create the address itself under the same uuid
INSERT INTO ResourceVO (`uuid`, `resourceName`, `resourceType`, `concreteResourceType`)
VALUES (email_address_uuid, NULL, 'SNSEmailAddressVO', 'org.zstack.sns.platform.email.SNSEmailAddressVO');
INSERT INTO `SNSEmailAddressVO` (`uuid`, `emailAddress`, `endpointUuid`, `createDate`, `lastOpDate`)
VALUES (email_address_uuid, email_address, endpoint_uuid, NOW(), NOW());
END IF;
END LOOP;
CLOSE cur;
-- emits the completion time into the upgrade output (progress marker —
-- NOTE(review): presumed; matches the other procedures in this script)
SELECT CURTIME();
END $$
DELIMITER ;
CALL upgradeEmailAddressFromEndpoint();
DROP PROCEDURE IF EXISTS upgradeEmailAddressFromEndpoint;
UPDATE zstack.SNSEmailEndpointVO SET email = NULL;
-- ----------------------------
-- For multicast router
-- ----------------------------
-- A multicast router instance (state machine: see `state`).
CREATE TABLE IF NOT EXISTS `zstack`.`MulticastRouterVO` (
`uuid` VARCHAR(32) NOT NULL UNIQUE,
`description` VARCHAR(2048) DEFAULT NULL,
`state` VARCHAR(32) NOT NULL,
`lastOpDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' ON UPDATE CURRENT_TIMESTAMP,
`createDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00',
PRIMARY KEY (`uuid`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- PIM rendezvous point configuration for a multicast router
-- (rpAddress/groupAddress are wide enough for IPv6 text form).
CREATE TABLE IF NOT EXISTS `zstack`.`MulticastRouterRendezvousPointVO` (
`uuid` VARCHAR(32) NOT NULL UNIQUE,
`multicastRouterUuid` VARCHAR(32) NOT NULL,
`rpAddress` VARCHAR(64) NOT NULL,
`groupAddress` VARCHAR(64) NOT NULL,
`lastOpDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' ON UPDATE CURRENT_TIMESTAMP,
`createDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00',
CONSTRAINT fkMultiCastRouterRendezvousPointVOMulticastRouterVO FOREIGN KEY (multicastRouterUuid) REFERENCES MulticastRouterVO (uuid) ON DELETE CASCADE,
PRIMARY KEY (`uuid`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- Link table binding a multicast router (shared uuid/PK) to the VPC
-- virtual router that hosts it; rows disappear with either side.
CREATE TABLE IF NOT EXISTS `zstack`.`MulticastRouterVpcVRouterRefVO` (
`uuid` VARCHAR(32) NOT NULL UNIQUE,
`vpcRouterUuid` VARCHAR(32) NOT NULL,
CONSTRAINT fkMulticastRouterVpcVRouterRefVOMulticastRouterVO FOREIGN KEY (uuid) REFERENCES MulticastRouterVO (uuid) ON DELETE CASCADE,
CONSTRAINT fkMulticastRouterVpcVRouterRefVOVpcRouterVmVO FOREIGN KEY (vpcRouterUuid) REFERENCES VpcRouterVmVO (uuid) ON DELETE CASCADE,
PRIMARY KEY (`uuid`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- speed up long-job lookup by API message id
CREATE INDEX idxLongJobVOapiId ON LongJobVO (apiId);
-- new nullable discriminator on roles; NULL for pre-existing rows
ALTER TABLE `zstack`.`RoleVO` ADD COLUMN identity VARCHAR(64) DEFAULT NULL;
-- FOR GUEST TOOLS
-- Guest-tools image registry: one row per (architecture, hypervisor,
-- version) bundle, tracked per management node.
CREATE TABLE IF NOT EXISTS `zstack`.`GuestToolsVO` (
`uuid` VARCHAR(32) NOT NULL UNIQUE,
`name` varchar(255) DEFAULT "",
`description` varchar(2048) DEFAULT NULL,
`managementNodeUuid` VARCHAR(32) NOT NULL,
`architecture` VARCHAR(32) NOT NULL,
`hypervisorType` VARCHAR(32) NOT NULL,
`version` VARCHAR(32) NOT NULL,
`lastOpDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' ON UPDATE CURRENT_TIMESTAMP,
`createDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00',
PRIMARY KEY (`uuid`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- labelKey becomes mandatory for billing resource labels
ALTER TABLE `BillingResourceLabelVO` modify labelKey varchar(255) NOT NULL;
-- add h3c hardware sdn
-- ----------------------------
-- Registered hardware SDN controllers and their management credentials.
-- NOTE(review): username/password are stored as plain varchar here —
-- confirm the application layer encrypts them before persisting.
CREATE TABLE IF NOT EXISTS `SdnControllerVO` (
`uuid` VARCHAR(32) NOT NULL UNIQUE,
`vendorType` VARCHAR(255) NOT NULL,
`name` VARCHAR(255) NOT NULL,
`description` VARCHAR(2048) DEFAULT NULL,
`ip` VARCHAR(255) NOT NULL,
`username` VARCHAR(255) NOT NULL,
`password` VARCHAR(255) NOT NULL,
`lastOpDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' ON UPDATE CURRENT_TIMESTAMP,
`createDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00',
PRIMARY KEY (`uuid`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- L2 VXLAN network pool backed by a hardware SDN controller
-- (subtype of L2NetworkEO via shared PK/FK).
CREATE TABLE IF NOT EXISTS `HardwareL2VxlanNetworkPoolVO` (
`uuid` varchar(32) NOT NULL UNIQUE,
`sdnControllerUuid` VARCHAR(32) NOT NULL,
PRIMARY KEY (`uuid`),
CONSTRAINT fkHardwareL2VxlanNetworkPoolVOL2NetworkEO FOREIGN KEY (uuid) REFERENCES L2NetworkEO (uuid) ON UPDATE RESTRICT ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- ----------------------------
-- For port mirror
-- ----------------------------
-- A port-mirror service bound to the L3 network that carries mirrored
-- traffic. NOTE(review): the UNIQUE KEY on uuid duplicates the primary key
-- (redundant index) — same pattern repeats in the tables below.
CREATE TABLE IF NOT EXISTS `zstack`.`PortMirrorVO` (
`uuid` VARCHAR(32) NOT NULL,
`name` VARCHAR(128) DEFAULT "",
`state` VARCHAR(128) DEFAULT "Enable",
`mirrorNetworkUuid` VARCHAR(32) NOT NULL,
`description` VARCHAR(1024) DEFAULT NULL,
`lastOpDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' ON UPDATE CURRENT_TIMESTAMP,
`createDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00',
PRIMARY KEY (`uuid`),
UNIQUE KEY `uuid` (`uuid`) USING BTREE,
CONSTRAINT `fkPortMirrorVOL3NetworkVO` FOREIGN KEY (`mirrorNetworkUuid`) REFERENCES `L3NetworkEO` (`uuid`) ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- A single mirror session: source VM NIC -> destination endpoint.
-- Only the source endpoint has an FK to VmNicVO; dstEndPoint is a bare
-- uuid column (NOTE(review): confirm whether that is intentional).
CREATE TABLE IF NOT EXISTS `zstack`.`PortMirrorSessionVO` (
`uuid` VARCHAR(32) NOT NULL,
`name` VARCHAR(128) NOT NULL,
`srcEndPoint` VARCHAR(32) NOT NULL,
`dstEndPoint` VARCHAR(32) NOT NULL,
`type` VARCHAR(32) NOT NULL,
`status` VARCHAR(128) DEFAULT 'Created',
`internalId` int unsigned NOT NULL,
`description` VARCHAR(1024) DEFAULT NULL,
`portMirrorUuid` VARCHAR(32) NOT NULL,
`lastOpDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' ON UPDATE CURRENT_TIMESTAMP,
`createDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00',
PRIMARY KEY (`uuid`),
UNIQUE KEY `uuid` (`uuid`) USING BTREE,
CONSTRAINT `fkPortMirrorSessionVOPortMirrorVO` FOREIGN KEY (`portMirrorUuid`) REFERENCES `PortMirrorVO` (`uuid`) ON DELETE CASCADE,
CONSTRAINT `fkPortMirrorSessionVOSrcNIcVmNicVO` FOREIGN KEY (`srcEndPoint`) REFERENCES `VmNicVO` (`uuid`) ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- AUTO_INCREMENT source for session internalId values.
CREATE TABLE `zstack`.`PortMirrorSessionSequenceNumberVO` (
`id` int unsigned NOT NULL UNIQUE AUTO_INCREMENT,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- IP reserved on the mirror L3 network for a specific host/cluster tunnel
-- endpoint.
CREATE TABLE IF NOT EXISTS `zstack`.`MirrorNetworkUsedIpVO` (
`uuid` VARCHAR(32) NOT NULL,
`hostUuid` VARCHAR(32) NOT NULL,
`clusterUuid` VARCHAR(32) NOT NULL,
`l3NetworkUuid` VARCHAR(32) NOT NULL,
`description` VARCHAR(1024) DEFAULT NULL,
PRIMARY KEY (`uuid`),
UNIQUE KEY `uuid` (`uuid`) USING BTREE,
CONSTRAINT `fkMirrorNetworkUsedIpVOL3NetworkEO` FOREIGN KEY (`l3NetworkUuid`) REFERENCES `L3NetworkEO` (`uuid`) ON DELETE CASCADE,
CONSTRAINT `fkMirrorNetworkUsedIpVOHostEO` FOREIGN KEY (`hostUuid`) REFERENCES `HostEO` (`uuid`) ON DELETE CASCADE,
CONSTRAINT `fkMirrorNetworkUsedIpVOClusterEO` FOREIGN KEY (`clusterUuid`) REFERENCES `ClusterEO` (`uuid`) ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- Binds a session to its src/dst tunnel IPs; the destination side is
-- nullable and is cleared (SET NULL) if its tunnel IP row is deleted.
CREATE TABLE IF NOT EXISTS `zstack`.`PortMirrorSessionMirrorNetworkRefVO` (
`uuid` VARCHAR(32) NOT NULL,
`sessionUuid` VARCHAR(32) NOT NULL,
`srcTunnelUuid` VARCHAR(32) NOT NULL,
`dstTunnelUuid` VARCHAR(32),
`type` VARCHAR(32) DEFAULT 'GRE',
`lastOpDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' ON UPDATE CURRENT_TIMESTAMP,
`createDate` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00',
PRIMARY KEY (`uuid`),
UNIQUE KEY `uuid` (`uuid`) USING BTREE,
CONSTRAINT `fkMirrorRefVOPortMirrorSessionVO` FOREIGN KEY (`sessionUuid`) REFERENCES `PortMirrorSessionVO` (`uuid`) ON DELETE CASCADE,
CONSTRAINT `fkMirrorRefVOMirrorNetworkUsedIpVOSrc` FOREIGN KEY (`srcTunnelUuid`) REFERENCES `MirrorNetworkUsedIpVO` (`uuid`) ON DELETE CASCADE,
CONSTRAINT `fkMirrorRefVOMirrorNetworkUsedIpVODst` FOREIGN KEY (`dstTunnelUuid`) REFERENCES `MirrorNetworkUsedIpVO` (`uuid`) ON DELETE SET NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- Tag existing text templates as alarm templates and migrate the built-in
-- system alarm endpoint from the generic HTTP type to SYSTEM_HTTP.
-- Fixed: string literals now use single quotes — the original used double
-- quotes, which are identifier delimiters under the ANSI_QUOTES sql_mode
-- and would break the upgrade on such deployments. Values are unchanged.
alter table SNSTextTemplateVO add type varchar(255) DEFAULT 'ALARM';
update SNSApplicationEndpointVO set type = 'SYSTEM_HTTP' where name = 'system-alarm-endpoint' and platformUuid = '02d24b9b0a7f4ee1846f15cda248ceb7' and type = 'HTTP';
-- Backfill AccountResourceRefVO entries for VolumeSnapshotGroupVO rows that
-- have no ownership record: each group inherits the account that owns the
-- snapshot of its Root volume.
-- Fixed two defects in the original loop body:
--   1) SELECT ... INTO leaves the target variable untouched when no row
--      matches, so a group without a Root-snapshot owner silently reused
--      the previous iteration's accountUuid.
--   2) The NOT FOUND handler also fires for an empty SELECT ... INTO,
--      which ended the whole loop early on the first unowned group.
DROP PROCEDURE IF EXISTS addMissingResourceRef;
DELIMITER $$
CREATE PROCEDURE addMissingResourceRef()
BEGIN
DECLARE groupUuid VARCHAR(32);
DECLARE accountUuid VARCHAR(32);
DECLARE done INT DEFAULT FALSE;
-- snapshot groups that have no AccountResourceRefVO row at all
DECLARE cur CURSOR FOR SELECT sgroup.uuid FROM `zstack`.`VolumeSnapshotGroupVO` sgroup
WHERE NOT EXISTS (SELECT resourceUuid FROM zstack.AccountResourceRefVO WHERE resourceUuid = sgroup.uuid);
DECLARE CONTINUE HANDLER FOR NOT FOUND SET done = TRUE;
OPEN cur;
read_loop: LOOP
FETCH cur INTO groupUuid;
IF done THEN
LEAVE read_loop;
END IF;
-- reset before the lookup so a miss leaves NULL, not a stale value
SET accountUuid = NULL;
SELECT aref.accountUuid INTO accountUuid
FROM `zstack`.`VolumeSnapshotGroupRefVO` sref
INNER JOIN `zstack`.`AccountResourceRefVO` aref
ON sref.volumeSnapshotUuid = aref.resourceUuid
WHERE sref.volumeSnapshotGroupUuid = groupUuid
AND sref.volumeType = 'Root'
LIMIT 1;
-- clear the NOT FOUND flag possibly set by the SELECT above; only the
-- cursor FETCH may terminate the loop
SET done = FALSE;
IF accountUuid IS NOT NULL THEN
INSERT INTO `zstack`.`AccountResourceRefVO` (`resourceType`, `resourceUuid`, `accountUuid`, `ownerAccountUuid`, `concreteResourceType`, `permission`, `isShared`, `createDate`, `lastOpDate`)
VALUES ('VolumeSnapshotGroupVO', groupUuid, accountUuid, accountUuid, 'org.zstack.header.storage.snapshot.group.VolumeSnapshotGroupVO', 2, 0, NOW(), NOW());
END IF;
END LOOP;
CLOSE cur;
-- completion-time marker for the upgrade output
SELECT CURTIME();
END $$
DELIMITER ;
CALL addMissingResourceRef();
DROP PROCEDURE IF EXISTS addMissingResourceRef;
-- One-off fix: reset the built-in host XFS fragmentation alarm
-- (uuid bf7359930ee444d286fb88d2e51acf51) to the intended default
-- threshold of 85, unless the operator already changed it.
-- Fixed vs. original: threshold is a DOUBLE, so compare/assign with a
-- numeric literal instead of the string "85" (implicit casts); use single
-- quotes for the uuid literal (double quotes break under ANSI_QUOTES); and
-- rename the local variable `uuid` -> alarm_uuid, since inside a routine a
-- local variable of the same name shadows the column in SQL statements.
DROP PROCEDURE IF EXISTS modifyHostXfsFragAlarmDefaultThreshold;
DELIMITER $$
CREATE PROCEDURE modifyHostXfsFragAlarmDefaultThreshold()
BEGIN
DECLARE done INT DEFAULT FALSE;
DECLARE alarm_uuid VARCHAR(32);
DECLARE alarm_threshold DOUBLE;
DECLARE cur CURSOR FOR SELECT v.uuid, v.threshold FROM AlarmVO v WHERE v.uuid = 'bf7359930ee444d286fb88d2e51acf51';
DECLARE CONTINUE HANDLER FOR NOT FOUND SET done = TRUE;
OPEN cur;
read_loop: LOOP
FETCH cur INTO alarm_uuid, alarm_threshold;
IF done THEN
LEAVE read_loop;
END IF;
IF alarm_threshold != 85 THEN
UPDATE AlarmVO v SET v.threshold = 85 WHERE v.uuid = alarm_uuid;
END IF;
END LOOP;
CLOSE cur;
-- completion-time marker for the upgrade output
SELECT CURTIME();
END $$
DELIMITER ;
call modifyHostXfsFragAlarmDefaultThreshold();
-- Fixed: stray dataset artifact ("| the_stack |") was appended to this
-- statement, making it invalid SQL. Drops the upgrade helper after use.
DROP PROCEDURE IF EXISTS modifyHostXfsFragAlarmDefaultThreshold;
--
-- PostgreSQL database dump
--
-- Dumped from database version 9.5.3
-- Dumped by pg_dump version 9.5.3
-- Session settings emitted by pg_dump so the restore is independent of the
-- target session's defaults.
SET statement_timeout = 0;
SET lock_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SET check_function_bodies = false;
SET client_min_messages = warning;
SET row_security = off;
-- One schema per tenant/application (biobolsas, bonsai, demo, flor) plus a
-- shared `common` schema.
--
-- Name: biobolsas; Type: SCHEMA; Schema: -; Owner: -
--
CREATE SCHEMA biobolsas;
--
-- Name: bonsai; Type: SCHEMA; Schema: -; Owner: -
--
CREATE SCHEMA bonsai;
--
-- Name: common; Type: SCHEMA; Schema: -; Owner: -
--
CREATE SCHEMA common;
--
-- Name: demo; Type: SCHEMA; Schema: -; Owner: -
--
CREATE SCHEMA demo;
--
-- Name: flor; Type: SCHEMA; Schema: -; Owner: -
--
CREATE SCHEMA flor;
--
-- Name: plpgsql; Type: EXTENSION; Schema: -; Owner: -
--
CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog;
--
-- Name: EXTENSION plpgsql; Type: COMMENT; Schema: -; Owner: -
--
COMMENT ON EXTENSION plpgsql IS 'PL/pgSQL procedural language';
--
-- Name: hstore; Type: EXTENSION; Schema: -; Owner: -
--
CREATE EXTENSION IF NOT EXISTS hstore WITH SCHEMA public;
--
-- Name: EXTENSION hstore; Type: COMMENT; Schema: -; Owner: -
--
COMMENT ON EXTENSION hstore IS 'data type for storing sets of (key, value) pairs';
--
-- Name: pg_trgm; Type: EXTENSION; Schema: -; Owner: -
--
CREATE EXTENSION IF NOT EXISTS pg_trgm WITH SCHEMA public;
--
-- Name: EXTENSION pg_trgm; Type: COMMENT; Schema: -; Owner: -
--
COMMENT ON EXTENSION pg_trgm IS 'text similarity measurement and index searching based on trigrams';
SET search_path = demo, pg_catalog;
--
-- Name: simple_jsonb_to_hstore(jsonb); Type: FUNCTION; Schema: demo; Owner: -
--
-- Converts a flat (single-level) jsonb object into an hstore by aggregating
-- its key/value pairs; values are extracted as text via jsonb_each_text.
-- NOTE(review): for an empty jsonb object, array_agg yields NULL and the
-- result is NULL rather than an empty hstore — confirm callers expect that.
CREATE FUNCTION simple_jsonb_to_hstore(jdata jsonb) RETURNS public.hstore
LANGUAGE sql
AS $$
select hstore(array_agg(key), array_agg(value))
from jsonb_each_text(jdata)
$$;
SET search_path = public, pg_catalog;
--
-- Name: array_intersection(anyarray, anyarray); Type: FUNCTION; Schema: public; Owner: -
--
-- Returns the elements of $1 that also appear in $2, preserving $1's order
-- and duplicates (so this is not a strict set intersection). Each element
-- of $1 is probed against $2 with the && overlap operator.
-- NOTE(review): iterates only the first dimension of $1 — confirm callers
-- never pass multi-dimensional arrays.
CREATE FUNCTION array_intersection(anyarray, anyarray) RETURNS anyarray
LANGUAGE sql
AS $_$
SELECT ARRAY(
SELECT $1[i]
FROM generate_series( array_lower($1, 1), array_upper($1, 1) ) i
WHERE ARRAY[$1[i]] && $2
);
$_$;
SET search_path = biobolsas, pg_catalog;
SET default_tablespace = '';
SET default_with_oids = false;
--
-- Name: account_ledgers; Type: TABLE; Schema: biobolsas; Owner: -
--
-- Money movements between an origin account (account_id) and an optional
-- destination account (account_to_id). All monetary amounts in this schema
-- use numeric(14,2); exchange_rate uses numeric(14,4).
CREATE TABLE account_ledgers (
id integer NOT NULL,
reference text,
currency character varying(255),
account_id integer,
account_balance numeric(14,2) DEFAULT 0.0,
account_to_id integer,
account_to_balance numeric(14,2) DEFAULT 0.0,
date date,
operation character varying(20),
amount numeric(14,2) DEFAULT 0.0,
exchange_rate numeric(14,4) DEFAULT 1.0,
creator_id integer,
approver_id integer,
approver_datetime timestamp without time zone,
nuller_id integer,
nuller_datetime timestamp without time zone,
inverse boolean DEFAULT false,
has_error boolean DEFAULT false,
error_messages character varying(255),
project_id integer,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
status character varying(50) DEFAULT 'approved'::character varying,
updater_id integer,
name character varying(255),
contact_id integer
);
--
-- Name: account_ledgers_id_seq; Type: SEQUENCE; Schema: biobolsas; Owner: -
--
CREATE SEQUENCE account_ledgers_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: account_ledgers_id_seq; Type: SEQUENCE OWNED BY; Schema: biobolsas; Owner: -
--
ALTER SEQUENCE account_ledgers_id_seq OWNED BY account_ledgers.id;
--
-- Name: accounts; Type: TABLE; Schema: biobolsas; Owner: -
--
-- Financial documents/accounts (typed via `type`, linked to a contact and
-- optionally a project); tag_ids is a denormalized integer array of tags.
CREATE TABLE accounts (
id integer NOT NULL,
name character varying(255),
currency character varying(10),
exchange_rate numeric(14,4) DEFAULT 1.0,
amount numeric(14,2) DEFAULT 0.0,
type character varying(30),
contact_id integer,
project_id integer,
active boolean DEFAULT true,
description text,
date date,
state character varying(30),
has_error boolean DEFAULT false,
error_messages character varying(400),
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
tag_ids integer[] DEFAULT '{}'::integer[],
updater_id integer,
tax_percentage numeric(5,2) DEFAULT 0,
tax_id integer,
total numeric(14,2) DEFAULT 0,
tax_in_out boolean DEFAULT false,
extras jsonb,
creator_id integer,
approver_id integer,
nuller_id integer,
due_date date
);
--
-- Name: accounts_id_seq; Type: SEQUENCE; Schema: biobolsas; Owner: -
--
CREATE SEQUENCE accounts_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: accounts_id_seq; Type: SEQUENCE OWNED BY; Schema: biobolsas; Owner: -
--
ALTER SEQUENCE accounts_id_seq OWNED BY accounts.id;
--
-- Name: attachments; Type: TABLE; Schema: biobolsas; Owner: -
--
-- File attachments, polymorphically linked via attachable_type/attachable_id.
CREATE TABLE attachments (
id integer NOT NULL,
attachment_uid character varying(255),
name character varying(255),
attachable_type character varying(255),
user_id integer,
"position" integer DEFAULT 0,
image boolean DEFAULT false,
size integer,
image_attributes json,
created_at timestamp without time zone,
updated_at timestamp without time zone,
attachable_id integer,
publish boolean DEFAULT false
);
--
-- Name: attachments_id_seq; Type: SEQUENCE; Schema: biobolsas; Owner: -
--
CREATE SEQUENCE attachments_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: attachments_id_seq; Type: SEQUENCE OWNED BY; Schema: biobolsas; Owner: -
--
ALTER SEQUENCE attachments_id_seq OWNED BY attachments.id;
--
-- Name: contacts; Type: TABLE; Schema: biobolsas; Owner: -
--
-- People and organisations; role flags (staff/client/supplier) are
-- independent booleans, and login-capable contacts carry their own
-- encrypted_password/password_salt.
CREATE TABLE contacts (
id integer NOT NULL,
matchcode character varying(255),
first_name character varying(100),
last_name character varying(100),
organisation_name character varying(100),
address character varying(250),
phone character varying(40),
mobile character varying(40),
email character varying(200),
tax_number character varying(30),
aditional_info character varying(250),
code character varying(255),
type character varying(255),
"position" character varying(255),
active boolean DEFAULT true,
staff boolean DEFAULT false,
client boolean DEFAULT false,
supplier boolean DEFAULT false,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
incomes_status character varying(300) DEFAULT '{}'::character varying,
expenses_status character varying(300) DEFAULT '{}'::character varying,
tag_ids integer[] DEFAULT '{}'::integer[],
encrypted_password character varying(255),
password_salt character varying(255),
login boolean DEFAULT false,
active_login boolean DEFAULT true
);
--
-- Name: contacts_id_seq; Type: SEQUENCE; Schema: biobolsas; Owner: -
--
CREATE SEQUENCE contacts_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: contacts_id_seq; Type: SEQUENCE OWNED BY; Schema: biobolsas; Owner: -
--
ALTER SEQUENCE contacts_id_seq OWNED BY contacts.id;
--
-- Name: histories; Type: TABLE; Schema: biobolsas; Owner: -
--
-- Audit trail: per-record change snapshots, polymorphically linked via
-- historiable_type/historiable_id; diffs in history_data, full row in
-- all_data.
CREATE TABLE histories (
id integer NOT NULL,
user_id integer,
historiable_id integer,
new_item boolean DEFAULT false,
historiable_type character varying(255),
history_data json DEFAULT '{}'::json,
created_at timestamp without time zone,
klass_type character varying(255),
extras public.hstore,
all_data json DEFAULT '{}'::json
);
--
-- Name: histories_id_seq; Type: SEQUENCE; Schema: biobolsas; Owner: -
--
CREATE SEQUENCE histories_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: histories_id_seq; Type: SEQUENCE OWNED BY; Schema: biobolsas; Owner: -
--
ALTER SEQUENCE histories_id_seq OWNED BY histories.id;
--
-- Name: inventories; Type: TABLE; Schema: biobolsas; Owner: -
--
-- Stock movement documents (operation in/out/transfer — see `operation`);
-- transfers reference a destination store via store_to_id/transference_id.
CREATE TABLE inventories (
id integer NOT NULL,
contact_id integer,
store_id integer,
account_id integer,
date date,
ref_number character varying(255),
operation character varying(10),
description character varying(255),
total numeric(14,2) DEFAULT 0,
creator_id integer,
transference_id integer,
store_to_id integer,
project_id integer,
has_error boolean DEFAULT false,
error_messages character varying(255),
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
updater_id integer
);
--
-- Name: inventories_id_seq; Type: SEQUENCE; Schema: biobolsas; Owner: -
--
CREATE SEQUENCE inventories_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: inventories_id_seq; Type: SEQUENCE OWNED BY; Schema: biobolsas; Owner: -
--
ALTER SEQUENCE inventories_id_seq OWNED BY inventories.id;
--
-- Name: inventory_details; Type: TABLE; Schema: biobolsas; Owner: -
--
-- Line items of an inventory document: quantity of one item in one store.
CREATE TABLE inventory_details (
id integer NOT NULL,
inventory_id integer,
item_id integer,
store_id integer,
quantity numeric(14,2) DEFAULT 0.0,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: inventory_details_id_seq; Type: SEQUENCE; Schema: biobolsas; Owner: -
--
CREATE SEQUENCE inventory_details_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: inventory_details_id_seq; Type: SEQUENCE OWNED BY; Schema: biobolsas; Owner: -
--
ALTER SEQUENCE inventory_details_id_seq OWNED BY inventory_details.id;
--
-- Name: items; Type: TABLE; Schema: biobolsas; Owner: -
--
-- Product/service catalog; unit_symbol/unit_name denormalize the unit
-- referenced by unit_id, and prices use numeric(14,2).
CREATE TABLE items (
id integer NOT NULL,
unit_id integer,
price numeric(14,2) DEFAULT 0.0,
name character varying(255),
description character varying(255),
code character varying(100),
for_sale boolean DEFAULT true,
stockable boolean DEFAULT true,
active boolean DEFAULT true,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
buy_price numeric(14,2) DEFAULT 0.0,
unit_symbol character varying(20),
unit_name character varying(255),
tag_ids integer[] DEFAULT '{}'::integer[],
updater_id integer,
creator_id integer,
publish boolean DEFAULT false,
brand character varying(255)
);
--
-- Name: items_id_seq; Type: SEQUENCE; Schema: biobolsas; Owner: -
--
CREATE SEQUENCE items_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: items_id_seq; Type: SEQUENCE OWNED BY; Schema: biobolsas; Owner: -
--
ALTER SEQUENCE items_id_seq OWNED BY items.id;
--
-- Name: movement_details; Type: TABLE; Schema: biobolsas; Owner: -
--
-- Line items of an account/movement document: per-item quantity, price,
-- discount and running balance; original_price keeps the pre-discount price.
CREATE TABLE movement_details (
id integer NOT NULL,
account_id integer,
item_id integer,
quantity numeric(14,2) DEFAULT 0.0,
price numeric(14,2) DEFAULT 0.0,
description character varying(255),
discount numeric(14,2) DEFAULT 0.0,
balance numeric(14,2) DEFAULT 0.0,
original_price numeric(14,2) DEFAULT 0.0,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: movement_details_id_seq; Type: SEQUENCE; Schema: biobolsas; Owner: -
--
CREATE SEQUENCE movement_details_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: movement_details_id_seq; Type: SEQUENCE OWNED BY; Schema: biobolsas; Owner: -
--
ALTER SEQUENCE movement_details_id_seq OWNED BY movement_details.id;
--
-- Name: pages; Type: TABLE; Schema: biobolsas; Owner: -
--
-- CMS pages for a shopping store; layout content is stored as json in
-- `sections`.
CREATE TABLE pages (
id integer NOT NULL,
shopping_store_id integer,
page_type character varying(255) DEFAULT 'layout'::character varying,
sections json DEFAULT '{}'::json,
name character varying(255),
section character varying(255),
created_at timestamp without time zone,
updated_at timestamp without time zone
);
--
-- Name: pages_id_seq; Type: SEQUENCE; Schema: biobolsas; Owner: -
--
CREATE SEQUENCE pages_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: pages_id_seq; Type: SEQUENCE OWNED BY; Schema: biobolsas; Owner: -
--
ALTER SEQUENCE pages_id_seq OWNED BY pages.id;
--
-- Name: projects; Type: TABLE; Schema: biobolsas; Owner: -
--
-- Projects used to group accounts, ledgers and inventories.
CREATE TABLE projects (
id integer NOT NULL,
name character varying(255),
active boolean DEFAULT true,
date_start date,
date_end date,
description text,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: projects_id_seq; Type: SEQUENCE; Schema: biobolsas; Owner: -
--
CREATE SEQUENCE projects_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: projects_id_seq; Type: SEQUENCE OWNED BY; Schema: biobolsas; Owner: -
--
ALTER SEQUENCE projects_id_seq OWNED BY projects.id;
--
-- Name: schema_migrations; Type: TABLE; Schema: biobolsas; Owner: -
--
-- Rails/ActiveRecord migration bookkeeping for this schema.
CREATE TABLE schema_migrations (
version character varying(255) NOT NULL
);
--
-- Name: stocks; Type: TABLE; Schema: biobolsas; Owner: -
--
-- Current stock level of one item in one store, with its unit cost and a
-- reorder minimum.
CREATE TABLE stocks (
id integer NOT NULL,
store_id integer,
item_id integer,
unitary_cost numeric(14,2) DEFAULT 0.0,
quantity numeric(14,2) DEFAULT 0.0,
minimum numeric(14,2) DEFAULT 0.0,
user_id integer,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
active boolean DEFAULT true
);
--
-- Name: stocks_id_seq; Type: SEQUENCE; Schema: biobolsas; Owner: -
--
CREATE SEQUENCE stocks_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: stocks_id_seq; Type: SEQUENCE OWNED BY; Schema: biobolsas; Owner: -
--
ALTER SEQUENCE stocks_id_seq OWNED BY stocks.id;
--
-- Name: stores; Type: TABLE; Schema: biobolsas; Owner: -
--
-- Physical stores/warehouses that hold stock.
CREATE TABLE stores (
id integer NOT NULL,
name character varying(255),
address character varying(255),
phone character varying(40),
active boolean DEFAULT true,
description character varying(255),
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: stores_id_seq; Type: SEQUENCE; Schema: biobolsas; Owner: -
--
CREATE SEQUENCE stores_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: stores_id_seq; Type: SEQUENCE OWNED BY; Schema: biobolsas; Owner: -
--
ALTER SEQUENCE stores_id_seq OWNED BY stores.id;
--
-- Name: tag_groups; Type: TABLE; Schema: biobolsas; Owner: -
--
CREATE TABLE tag_groups (
id integer NOT NULL,
name character varying(255),
bgcolor character varying(255),
tag_ids integer[] DEFAULT '{}'::integer[],
created_at timestamp without time zone,
updated_at timestamp without time zone
);
--
-- Name: tag_groups_id_seq; Type: SEQUENCE; Schema: biobolsas; Owner: -
--
CREATE SEQUENCE tag_groups_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: tag_groups_id_seq; Type: SEQUENCE OWNED BY; Schema: biobolsas; Owner: -
--
ALTER SEQUENCE tag_groups_id_seq OWNED BY tag_groups.id;
--
-- Name: tags; Type: TABLE; Schema: biobolsas; Owner: -
--
CREATE TABLE tags (
id integer NOT NULL,
name character varying(255),
bgcolor character varying(10),
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: tags_id_seq; Type: SEQUENCE; Schema: biobolsas; Owner: -
--
CREATE SEQUENCE tags_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: tags_id_seq; Type: SEQUENCE OWNED BY; Schema: biobolsas; Owner: -
--
ALTER SEQUENCE tags_id_seq OWNED BY tags.id;
--
-- Name: taxes; Type: TABLE; Schema: biobolsas; Owner: -
--
CREATE TABLE taxes (
id integer NOT NULL,
name character varying(100),
abreviation character varying(20),
percentage numeric(5,2) DEFAULT 0.0,
created_at timestamp without time zone,
updated_at timestamp without time zone
);
--
-- Name: taxes_id_seq; Type: SEQUENCE; Schema: biobolsas; Owner: -
--
CREATE SEQUENCE taxes_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: taxes_id_seq; Type: SEQUENCE OWNED BY; Schema: biobolsas; Owner: -
--
ALTER SEQUENCE taxes_id_seq OWNED BY taxes.id;
--
-- Name: units; Type: TABLE; Schema: biobolsas; Owner: -
--
CREATE TABLE units (
id integer NOT NULL,
name character varying(100),
symbol character varying(20),
"integer" boolean DEFAULT false,
visible boolean DEFAULT true,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: units_id_seq; Type: SEQUENCE; Schema: biobolsas; Owner: -
--
CREATE SEQUENCE units_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: units_id_seq; Type: SEQUENCE OWNED BY; Schema: biobolsas; Owner: -
--
ALTER SEQUENCE units_id_seq OWNED BY units.id;
-- Tenant schema "bonsai": repeats the same table set as the preceding
-- tenant schema (schema-per-tenant multi-tenancy; each tenant gets an
-- identical copy of the application tables).
SET search_path = bonsai, pg_catalog;
--
-- Name: account_ledgers; Type: TABLE; Schema: bonsai; Owner: -
--
-- Double-entry-style ledger rows: a movement of `amount` between account_id
-- and account_to_id, with running balances snapshotted on the row and an
-- approve/null audit trail (creator/approver/nuller ids and datetimes).
CREATE TABLE account_ledgers (
id integer NOT NULL,
reference text,
currency character varying(255),
account_id integer,
account_balance numeric(14,2) DEFAULT 0.0,
account_to_id integer,
account_to_balance numeric(14,2) DEFAULT 0.0,
date date,
operation character varying(20),
amount numeric(14,2) DEFAULT 0.0,
exchange_rate numeric(14,4) DEFAULT 1.0,
creator_id integer,
approver_id integer,
approver_datetime timestamp without time zone,
nuller_id integer,
nuller_datetime timestamp without time zone,
inverse boolean DEFAULT false,
has_error boolean DEFAULT false,
error_messages character varying(255),
project_id integer,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
status character varying(50) DEFAULT 'approved'::character varying,
updater_id integer,
name character varying(255),
contact_id integer
);
--
-- Name: account_ledgers_id_seq; Type: SEQUENCE; Schema: bonsai; Owner: -
--
CREATE SEQUENCE account_ledgers_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: account_ledgers_id_seq; Type: SEQUENCE OWNED BY; Schema: bonsai; Owner: -
--
ALTER SEQUENCE account_ledgers_id_seq OWNED BY account_ledgers.id;
--
-- Name: accounts; Type: TABLE; Schema: bonsai; Owner: -
--
-- Single-table-inheritance accounts ("type" discriminator column) carrying
-- currency, tax and total amounts; extras jsonb holds unstructured data.
CREATE TABLE accounts (
id integer NOT NULL,
name character varying(255),
currency character varying(10),
exchange_rate numeric(14,4) DEFAULT 1.0,
amount numeric(14,2) DEFAULT 0.0,
type character varying(30),
contact_id integer,
project_id integer,
active boolean DEFAULT true,
description text,
date date,
state character varying(30),
has_error boolean DEFAULT false,
error_messages character varying(400),
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
tag_ids integer[] DEFAULT '{}'::integer[],
updater_id integer,
tax_percentage numeric(5,2) DEFAULT 0,
tax_id integer,
total numeric(14,2) DEFAULT 0,
tax_in_out boolean DEFAULT false,
extras jsonb,
creator_id integer,
approver_id integer,
nuller_id integer,
due_date date
);
--
-- Name: accounts_id_seq; Type: SEQUENCE; Schema: bonsai; Owner: -
--
CREATE SEQUENCE accounts_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: accounts_id_seq; Type: SEQUENCE OWNED BY; Schema: bonsai; Owner: -
--
ALTER SEQUENCE accounts_id_seq OWNED BY accounts.id;
--
-- Name: attachments; Type: TABLE; Schema: bonsai; Owner: -
--
-- Polymorphic attachments (attachable_type/attachable_id pair); the file
-- itself lives outside the DB, referenced by attachment_uid.
CREATE TABLE attachments (
id integer NOT NULL,
attachment_uid character varying(255),
name character varying(255),
attachable_type character varying(255),
user_id integer,
"position" integer DEFAULT 0,
image boolean DEFAULT false,
size integer,
image_attributes json,
created_at timestamp without time zone,
updated_at timestamp without time zone,
attachable_id integer,
publish boolean DEFAULT false
);
--
-- Name: attachments_id_seq; Type: SEQUENCE; Schema: bonsai; Owner: -
--
CREATE SEQUENCE attachments_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: attachments_id_seq; Type: SEQUENCE OWNED BY; Schema: bonsai; Owner: -
--
ALTER SEQUENCE attachments_id_seq OWNED BY attachments.id;
--
-- Name: contacts; Type: TABLE; Schema: bonsai; Owner: -
--
-- Contacts doubling as staff/client/supplier (boolean role flags) and,
-- optionally, as login principals (encrypted_password/password_salt).
CREATE TABLE contacts (
id integer NOT NULL,
matchcode character varying(255),
first_name character varying(100),
last_name character varying(100),
organisation_name character varying(100),
address character varying(250),
phone character varying(40),
mobile character varying(40),
email character varying(200),
tax_number character varying(30),
aditional_info character varying(250),
code character varying(255),
type character varying(255),
"position" character varying(255),
active boolean DEFAULT true,
staff boolean DEFAULT false,
client boolean DEFAULT false,
supplier boolean DEFAULT false,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
incomes_status character varying(300) DEFAULT '{}'::character varying,
expenses_status character varying(300) DEFAULT '{}'::character varying,
tag_ids integer[] DEFAULT '{}'::integer[],
encrypted_password character varying(255),
password_salt character varying(255),
login boolean DEFAULT false,
active_login boolean DEFAULT true
);
--
-- Name: contacts_id_seq; Type: SEQUENCE; Schema: bonsai; Owner: -
--
CREATE SEQUENCE contacts_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: contacts_id_seq; Type: SEQUENCE OWNED BY; Schema: bonsai; Owner: -
--
ALTER SEQUENCE contacts_id_seq OWNED BY contacts.id;
--
-- Name: histories; Type: TABLE; Schema: bonsai; Owner: -
--
-- Polymorphic audit log (historiable_type/historiable_id); change payloads
-- in json, plus an hstore "extras" column (requires the hstore extension
-- in the public schema).
CREATE TABLE histories (
id integer NOT NULL,
user_id integer,
historiable_id integer,
new_item boolean DEFAULT false,
historiable_type character varying(255),
history_data json DEFAULT '{}'::json,
created_at timestamp without time zone,
klass_type character varying(255),
extras public.hstore,
all_data json DEFAULT '{}'::json
);
--
-- Name: histories_id_seq; Type: SEQUENCE; Schema: bonsai; Owner: -
--
CREATE SEQUENCE histories_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: histories_id_seq; Type: SEQUENCE OWNED BY; Schema: bonsai; Owner: -
--
ALTER SEQUENCE histories_id_seq OWNED BY histories.id;
--
-- Name: inventories; Type: TABLE; Schema: bonsai; Owner: -
--
-- Inventory operations (operation varchar(10)); store_to_id and
-- transference_id support store-to-store transfers.
CREATE TABLE inventories (
id integer NOT NULL,
contact_id integer,
store_id integer,
account_id integer,
date date,
ref_number character varying(255),
operation character varying(10),
description character varying(255),
total numeric(14,2) DEFAULT 0,
creator_id integer,
transference_id integer,
store_to_id integer,
project_id integer,
has_error boolean DEFAULT false,
error_messages character varying(255),
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
updater_id integer
);
--
-- Name: inventories_id_seq; Type: SEQUENCE; Schema: bonsai; Owner: -
--
CREATE SEQUENCE inventories_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: inventories_id_seq; Type: SEQUENCE OWNED BY; Schema: bonsai; Owner: -
--
ALTER SEQUENCE inventories_id_seq OWNED BY inventories.id;
--
-- Name: inventory_details; Type: TABLE; Schema: bonsai; Owner: -
--
-- Line items of an inventory operation: item quantity per store.
CREATE TABLE inventory_details (
id integer NOT NULL,
inventory_id integer,
item_id integer,
store_id integer,
quantity numeric(14,2) DEFAULT 0.0,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: inventory_details_id_seq; Type: SEQUENCE; Schema: bonsai; Owner: -
--
CREATE SEQUENCE inventory_details_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: inventory_details_id_seq; Type: SEQUENCE OWNED BY; Schema: bonsai; Owner: -
--
ALTER SEQUENCE inventory_details_id_seq OWNED BY inventory_details.id;
--
-- Name: items; Type: TABLE; Schema: bonsai; Owner: -
--
-- Sellable/stockable items; unit_symbol/unit_name denormalize the unit row
-- referenced by unit_id.
CREATE TABLE items (
id integer NOT NULL,
unit_id integer,
price numeric(14,2) DEFAULT 0.0,
name character varying(255),
description character varying(255),
code character varying(100),
for_sale boolean DEFAULT true,
stockable boolean DEFAULT true,
active boolean DEFAULT true,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
buy_price numeric(14,2) DEFAULT 0.0,
unit_symbol character varying(20),
unit_name character varying(255),
tag_ids integer[] DEFAULT '{}'::integer[],
updater_id integer,
creator_id integer,
publish boolean DEFAULT false,
brand character varying(255)
);
--
-- Name: items_id_seq; Type: SEQUENCE; Schema: bonsai; Owner: -
--
CREATE SEQUENCE items_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: items_id_seq; Type: SEQUENCE OWNED BY; Schema: bonsai; Owner: -
--
ALTER SEQUENCE items_id_seq OWNED BY items.id;
--
-- Name: movement_details; Type: TABLE; Schema: bonsai; Owner: -
--
CREATE TABLE movement_details (
id integer NOT NULL,
account_id integer,
item_id integer,
quantity numeric(14,2) DEFAULT 0.0,
price numeric(14,2) DEFAULT 0.0,
description character varying(255),
discount numeric(14,2) DEFAULT 0.0,
balance numeric(14,2) DEFAULT 0.0,
original_price numeric(14,2) DEFAULT 0.0,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: movement_details_id_seq; Type: SEQUENCE; Schema: bonsai; Owner: -
--
CREATE SEQUENCE movement_details_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: movement_details_id_seq; Type: SEQUENCE OWNED BY; Schema: bonsai; Owner: -
--
ALTER SEQUENCE movement_details_id_seq OWNED BY movement_details.id;
--
-- Name: pages; Type: TABLE; Schema: bonsai; Owner: -
--
CREATE TABLE pages (
id integer NOT NULL,
shopping_store_id integer,
page_type character varying(255) DEFAULT 'layout'::character varying,
sections json DEFAULT '{}'::json,
name character varying(255),
section character varying(255),
created_at timestamp without time zone,
updated_at timestamp without time zone
);
--
-- Name: pages_id_seq; Type: SEQUENCE; Schema: bonsai; Owner: -
--
CREATE SEQUENCE pages_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: pages_id_seq; Type: SEQUENCE OWNED BY; Schema: bonsai; Owner: -
--
ALTER SEQUENCE pages_id_seq OWNED BY pages.id;
--
-- Name: projects; Type: TABLE; Schema: bonsai; Owner: -
--
CREATE TABLE projects (
id integer NOT NULL,
name character varying(255),
active boolean DEFAULT true,
date_start date,
date_end date,
description text,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: projects_id_seq; Type: SEQUENCE; Schema: bonsai; Owner: -
--
CREATE SEQUENCE projects_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: projects_id_seq; Type: SEQUENCE OWNED BY; Schema: bonsai; Owner: -
--
ALTER SEQUENCE projects_id_seq OWNED BY projects.id;
--
-- Name: schema_migrations; Type: TABLE; Schema: bonsai; Owner: -
--
-- Per-tenant Rails migration bookkeeping.
CREATE TABLE schema_migrations (
version character varying(255) NOT NULL
);
--
-- Name: stocks; Type: TABLE; Schema: bonsai; Owner: -
--
CREATE TABLE stocks (
id integer NOT NULL,
store_id integer,
item_id integer,
unitary_cost numeric(14,2) DEFAULT 0.0,
quantity numeric(14,2) DEFAULT 0.0,
minimum numeric(14,2) DEFAULT 0.0,
user_id integer,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
active boolean DEFAULT true
);
--
-- Name: stocks_id_seq; Type: SEQUENCE; Schema: bonsai; Owner: -
--
CREATE SEQUENCE stocks_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: stocks_id_seq; Type: SEQUENCE OWNED BY; Schema: bonsai; Owner: -
--
ALTER SEQUENCE stocks_id_seq OWNED BY stocks.id;
--
-- Name: stores; Type: TABLE; Schema: bonsai; Owner: -
--
CREATE TABLE stores (
id integer NOT NULL,
name character varying(255),
address character varying(255),
phone character varying(40),
active boolean DEFAULT true,
description character varying(255),
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: stores_id_seq; Type: SEQUENCE; Schema: bonsai; Owner: -
--
CREATE SEQUENCE stores_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: stores_id_seq; Type: SEQUENCE OWNED BY; Schema: bonsai; Owner: -
--
ALTER SEQUENCE stores_id_seq OWNED BY stores.id;
--
-- Name: tag_groups; Type: TABLE; Schema: bonsai; Owner: -
--
CREATE TABLE tag_groups (
id integer NOT NULL,
name character varying(255),
bgcolor character varying(255),
tag_ids integer[] DEFAULT '{}'::integer[],
created_at timestamp without time zone,
updated_at timestamp without time zone
);
--
-- Name: tag_groups_id_seq; Type: SEQUENCE; Schema: bonsai; Owner: -
--
CREATE SEQUENCE tag_groups_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: tag_groups_id_seq; Type: SEQUENCE OWNED BY; Schema: bonsai; Owner: -
--
ALTER SEQUENCE tag_groups_id_seq OWNED BY tag_groups.id;
--
-- Name: tags; Type: TABLE; Schema: bonsai; Owner: -
--
CREATE TABLE tags (
id integer NOT NULL,
name character varying(255),
bgcolor character varying(10),
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: tags_id_seq; Type: SEQUENCE; Schema: bonsai; Owner: -
--
CREATE SEQUENCE tags_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: tags_id_seq; Type: SEQUENCE OWNED BY; Schema: bonsai; Owner: -
--
ALTER SEQUENCE tags_id_seq OWNED BY tags.id;
--
-- Name: taxes; Type: TABLE; Schema: bonsai; Owner: -
--
-- NOTE(review): "abreviation" misspelling is repeated in every tenant
-- schema; kept as dumped.
CREATE TABLE taxes (
id integer NOT NULL,
name character varying(100),
abreviation character varying(20),
percentage numeric(5,2) DEFAULT 0.0,
created_at timestamp without time zone,
updated_at timestamp without time zone
);
--
-- Name: taxes_id_seq; Type: SEQUENCE; Schema: bonsai; Owner: -
--
CREATE SEQUENCE taxes_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: taxes_id_seq; Type: SEQUENCE OWNED BY; Schema: bonsai; Owner: -
--
ALTER SEQUENCE taxes_id_seq OWNED BY taxes.id;
--
-- Name: units; Type: TABLE; Schema: bonsai; Owner: -
--
CREATE TABLE units (
id integer NOT NULL,
name character varying(100),
symbol character varying(20),
"integer" boolean DEFAULT false,
visible boolean DEFAULT true,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: units_id_seq; Type: SEQUENCE; Schema: bonsai; Owner: -
--
CREATE SEQUENCE units_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: units_id_seq; Type: SEQUENCE OWNED BY; Schema: bonsai; Owner: -
--
ALTER SEQUENCE units_id_seq OWNED BY units.id;
-- Shared schema "common": cross-tenant tables (users, organisations and the
-- links between them), unlike the per-tenant schemas above/below.
SET search_path = common, pg_catalog;
--
-- Name: links; Type: TABLE; Schema: common; Owner: -
--
-- Membership join between a user and an organisation, carrying the user's
-- role and per-tenant api_token. NOTE(review): the demo schema's links
-- table spells this column "rol" and lacks api_token — schema drift.
CREATE TABLE links (
id integer NOT NULL,
organisation_id integer,
user_id integer,
settings character varying(255),
creator boolean DEFAULT false,
master_account boolean DEFAULT false,
role character varying(50),
active boolean DEFAULT true,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
tenant character varying(100),
api_token character varying(255)
);
--
-- Name: links_id_seq; Type: SEQUENCE; Schema: common; Owner: -
--
CREATE SEQUENCE links_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: links_id_seq; Type: SEQUENCE OWNED BY; Schema: common; Owner: -
--
ALTER SEQUENCE links_id_seq OWNED BY links.id;
--
-- Name: organisations; Type: TABLE; Schema: common; Owner: -
--
-- One row per tenant; "tenant" names the tenant's schema and "plan"
-- carries the subscription plan (default '2users').
CREATE TABLE organisations (
id integer NOT NULL,
country_id integer,
name character varying(100),
address character varying(255),
address_alt character varying(255),
phone character varying(40),
phone_alt character varying(40),
mobile character varying(40),
email character varying(255),
website character varying(255),
user_id integer,
due_date date,
preferences text,
time_zone character varying(100),
tenant character varying(50),
currency character varying(10),
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
country_code character varying(5),
settings jsonb,
due_on date,
plan character varying(255) DEFAULT '2users'::character varying
);
--
-- Name: organizations_id_seq; Type: SEQUENCE; Schema: common; Owner: -
--
-- NOTE(review): sequence is spelled "organizations_id_seq" (z) while the
-- table is "organisations" (s); the OWNED BY below still links them
-- correctly, so this is cosmetic drift only.
CREATE SEQUENCE organizations_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: organizations_id_seq; Type: SEQUENCE OWNED BY; Schema: common; Owner: -
--
ALTER SEQUENCE organizations_id_seq OWNED BY organisations.id;
--
-- Name: shopping_stores; Type: TABLE; Schema: common; Owner: -
--
-- Public storefront per organisation; configuration is free-form json and
-- cart_item_ids is a denormalized integer[] of item ids.
CREATE TABLE shopping_stores (
id integer NOT NULL,
name character varying(255),
publish boolean DEFAULT false,
active boolean DEFAULT false,
url character varying(255),
tenant character varying(255),
organisation_id integer,
configuration json DEFAULT '{}'::json,
cart_item_ids integer[] DEFAULT '{}'::integer[]
);
--
-- Name: shopping_stores_id_seq; Type: SEQUENCE; Schema: common; Owner: -
--
CREATE SEQUENCE shopping_stores_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: shopping_stores_id_seq; Type: SEQUENCE OWNED BY; Schema: common; Owner: -
--
ALTER SEQUENCE shopping_stores_id_seq OWNED BY shopping_stores.id;
--
-- Name: users; Type: TABLE; Schema: common; Owner: -
--
-- Global user accounts with password-auth and recovery-token columns
-- (encrypted_password/salt, confirmation and reset_password tokens);
-- old_emails keeps a text[] history of previous addresses.
CREATE TABLE users (
id integer NOT NULL,
email character varying(255) NOT NULL,
first_name character varying(80),
last_name character varying(80),
phone character varying(40),
mobile character varying(40),
website character varying(200),
description character varying(255),
encrypted_password character varying(255),
password_salt character varying(255),
confirmation_token character varying(60),
confirmation_sent_at timestamp without time zone,
confirmed_at timestamp without time zone,
reset_password_token character varying(255),
reset_password_sent_at timestamp without time zone,
reseted_password_at timestamp without time zone,
sign_in_count integer DEFAULT 0,
last_sign_in_at timestamp without time zone,
change_default_password boolean DEFAULT false,
address character varying(255),
active boolean DEFAULT true,
auth_token character varying(255),
rol character varying(50),
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
old_emails text[] DEFAULT '{}'::text[],
locale character varying DEFAULT 'en'::character varying
);
--
-- Name: users_id_seq; Type: SEQUENCE; Schema: common; Owner: -
--
CREATE SEQUENCE users_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: users_id_seq; Type: SEQUENCE OWNED BY; Schema: common; Owner: -
--
ALTER SEQUENCE users_id_seq OWNED BY users.id;
-- Tenant schema "demo": same application tables as the other tenant
-- schemas, plus its own (older) copies of links and organisations.
SET search_path = demo, pg_catalog;
--
-- Name: account_ledgers; Type: TABLE; Schema: demo; Owner: -
--
-- Identical structure to bonsai.account_ledgers (see notes there).
CREATE TABLE account_ledgers (
id integer NOT NULL,
reference text,
currency character varying(255),
account_id integer,
account_balance numeric(14,2) DEFAULT 0.0,
account_to_id integer,
account_to_balance numeric(14,2) DEFAULT 0.0,
date date,
operation character varying(20),
amount numeric(14,2) DEFAULT 0.0,
exchange_rate numeric(14,4) DEFAULT 1.0,
creator_id integer,
approver_id integer,
approver_datetime timestamp without time zone,
nuller_id integer,
nuller_datetime timestamp without time zone,
inverse boolean DEFAULT false,
has_error boolean DEFAULT false,
error_messages character varying(255),
project_id integer,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
status character varying(50) DEFAULT 'approved'::character varying,
updater_id integer,
name character varying(255),
contact_id integer
);
--
-- Name: account_ledgers_id_seq; Type: SEQUENCE; Schema: demo; Owner: -
--
CREATE SEQUENCE account_ledgers_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: account_ledgers_id_seq; Type: SEQUENCE OWNED BY; Schema: demo; Owner: -
--
ALTER SEQUENCE account_ledgers_id_seq OWNED BY account_ledgers.id;
--
-- Name: accounts; Type: TABLE; Schema: demo; Owner: -
--
CREATE TABLE accounts (
id integer NOT NULL,
name character varying(255),
currency character varying(10),
exchange_rate numeric(14,4) DEFAULT 1.0,
amount numeric(14,2) DEFAULT 0.0,
type character varying(30),
contact_id integer,
project_id integer,
active boolean DEFAULT true,
description text,
date date,
state character varying(30),
has_error boolean DEFAULT false,
error_messages character varying(400),
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
tag_ids integer[] DEFAULT '{}'::integer[],
updater_id integer,
tax_percentage numeric(5,2) DEFAULT 0,
tax_id integer,
total numeric(14,2) DEFAULT 0,
tax_in_out boolean DEFAULT false,
extras jsonb,
creator_id integer,
approver_id integer,
nuller_id integer,
due_date date
);
--
-- Name: accounts_id_seq; Type: SEQUENCE; Schema: demo; Owner: -
--
CREATE SEQUENCE accounts_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: accounts_id_seq; Type: SEQUENCE OWNED BY; Schema: demo; Owner: -
--
ALTER SEQUENCE accounts_id_seq OWNED BY accounts.id;
--
-- Name: attachments; Type: TABLE; Schema: demo; Owner: -
--
CREATE TABLE attachments (
id integer NOT NULL,
attachment_uid character varying(255),
name character varying(255),
attachable_type character varying(255),
user_id integer,
"position" integer DEFAULT 0,
image boolean DEFAULT false,
size integer,
image_attributes json,
created_at timestamp without time zone,
updated_at timestamp without time zone,
attachable_id integer,
publish boolean DEFAULT false
);
--
-- Name: attachments_id_seq; Type: SEQUENCE; Schema: demo; Owner: -
--
CREATE SEQUENCE attachments_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: attachments_id_seq; Type: SEQUENCE OWNED BY; Schema: demo; Owner: -
--
ALTER SEQUENCE attachments_id_seq OWNED BY attachments.id;
--
-- Name: contacts; Type: TABLE; Schema: demo; Owner: -
--
CREATE TABLE contacts (
id integer NOT NULL,
matchcode character varying(255),
first_name character varying(100),
last_name character varying(100),
organisation_name character varying(100),
address character varying(250),
phone character varying(40),
mobile character varying(40),
email character varying(200),
tax_number character varying(30),
aditional_info character varying(250),
code character varying(255),
type character varying(255),
"position" character varying(255),
active boolean DEFAULT true,
staff boolean DEFAULT false,
client boolean DEFAULT false,
supplier boolean DEFAULT false,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
incomes_status character varying(300) DEFAULT '{}'::character varying,
expenses_status character varying(300) DEFAULT '{}'::character varying,
tag_ids integer[] DEFAULT '{}'::integer[],
encrypted_password character varying(255),
password_salt character varying(255),
login boolean DEFAULT false,
active_login boolean DEFAULT true
);
--
-- Name: contacts_id_seq; Type: SEQUENCE; Schema: demo; Owner: -
--
CREATE SEQUENCE contacts_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: contacts_id_seq; Type: SEQUENCE OWNED BY; Schema: demo; Owner: -
--
ALTER SEQUENCE contacts_id_seq OWNED BY contacts.id;
--
-- Name: histories; Type: TABLE; Schema: demo; Owner: -
--
-- Uses public.hstore, so the hstore extension must exist before restore.
CREATE TABLE histories (
id integer NOT NULL,
user_id integer,
historiable_id integer,
new_item boolean DEFAULT false,
historiable_type character varying(255),
history_data json DEFAULT '{}'::json,
created_at timestamp without time zone,
klass_type character varying(255),
extras public.hstore,
all_data json DEFAULT '{}'::json
);
--
-- Name: histories_id_seq; Type: SEQUENCE; Schema: demo; Owner: -
--
CREATE SEQUENCE histories_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: histories_id_seq; Type: SEQUENCE OWNED BY; Schema: demo; Owner: -
--
ALTER SEQUENCE histories_id_seq OWNED BY histories.id;
--
-- Name: inventories; Type: TABLE; Schema: demo; Owner: -
--
CREATE TABLE inventories (
id integer NOT NULL,
contact_id integer,
store_id integer,
account_id integer,
date date,
ref_number character varying(255),
operation character varying(10),
description character varying(255),
total numeric(14,2) DEFAULT 0,
creator_id integer,
transference_id integer,
store_to_id integer,
project_id integer,
has_error boolean DEFAULT false,
error_messages character varying(255),
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
updater_id integer
);
--
-- Name: inventories_id_seq; Type: SEQUENCE; Schema: demo; Owner: -
--
CREATE SEQUENCE inventories_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: inventories_id_seq; Type: SEQUENCE OWNED BY; Schema: demo; Owner: -
--
ALTER SEQUENCE inventories_id_seq OWNED BY inventories.id;
--
-- Name: inventory_details; Type: TABLE; Schema: demo; Owner: -
--
CREATE TABLE inventory_details (
id integer NOT NULL,
inventory_id integer,
item_id integer,
store_id integer,
quantity numeric(14,2) DEFAULT 0.0,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: inventory_details_id_seq; Type: SEQUENCE; Schema: demo; Owner: -
--
CREATE SEQUENCE inventory_details_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: inventory_details_id_seq; Type: SEQUENCE OWNED BY; Schema: demo; Owner: -
--
ALTER SEQUENCE inventory_details_id_seq OWNED BY inventory_details.id;
--
-- Name: items; Type: TABLE; Schema: demo; Owner: -
--
CREATE TABLE items (
id integer NOT NULL,
unit_id integer,
price numeric(14,2) DEFAULT 0.0,
name character varying(255),
description character varying(255),
code character varying(100),
for_sale boolean DEFAULT true,
stockable boolean DEFAULT true,
active boolean DEFAULT true,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
buy_price numeric(14,2) DEFAULT 0.0,
unit_symbol character varying(20),
unit_name character varying(255),
tag_ids integer[] DEFAULT '{}'::integer[],
updater_id integer,
creator_id integer,
publish boolean DEFAULT false,
brand character varying(255)
);
--
-- Name: items_id_seq; Type: SEQUENCE; Schema: demo; Owner: -
--
CREATE SEQUENCE items_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: items_id_seq; Type: SEQUENCE OWNED BY; Schema: demo; Owner: -
--
ALTER SEQUENCE items_id_seq OWNED BY items.id;
--
-- Name: links; Type: TABLE; Schema: demo; Owner: -
--
-- NOTE(review): older copy of common.links — column is "rol" here vs
-- "role" in common, and api_token is absent. Kept as dumped; any code
-- querying across schemas must account for this drift.
CREATE TABLE links (
id integer NOT NULL,
organisation_id integer,
user_id integer,
settings character varying(255),
creator boolean DEFAULT false,
master_account boolean DEFAULT false,
rol character varying(50),
active boolean DEFAULT true,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
tenant character varying(100)
);
--
-- Name: links_id_seq; Type: SEQUENCE; Schema: demo; Owner: -
--
CREATE SEQUENCE links_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: links_id_seq; Type: SEQUENCE OWNED BY; Schema: demo; Owner: -
--
ALTER SEQUENCE links_id_seq OWNED BY links.id;
--
-- Name: movement_details; Type: TABLE; Schema: demo; Owner: -
--
CREATE TABLE movement_details (
id integer NOT NULL,
account_id integer,
item_id integer,
quantity numeric(14,2) DEFAULT 0.0,
price numeric(14,2) DEFAULT 0.0,
description character varying(255),
discount numeric(14,2) DEFAULT 0.0,
balance numeric(14,2) DEFAULT 0.0,
original_price numeric(14,2) DEFAULT 0.0,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: movement_details_id_seq; Type: SEQUENCE; Schema: demo; Owner: -
--
CREATE SEQUENCE movement_details_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: movement_details_id_seq; Type: SEQUENCE OWNED BY; Schema: demo; Owner: -
--
ALTER SEQUENCE movement_details_id_seq OWNED BY movement_details.id;
--
-- Name: organisations; Type: TABLE; Schema: demo; Owner: -
--
-- NOTE(review): older copy of common.organisations — lacks the settings,
-- due_on and plan columns present in common. Kept as dumped.
CREATE TABLE organisations (
id integer NOT NULL,
country_id integer,
name character varying(100),
address character varying(255),
address_alt character varying(255),
phone character varying(40),
phone_alt character varying(40),
mobile character varying(40),
email character varying(255),
website character varying(255),
user_id integer,
due_date date,
preferences text,
time_zone character varying(100),
tenant character varying(50),
currency character varying(10),
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
country_code character varying(5)
);
--
-- Name: organisations_id_seq; Type: SEQUENCE; Schema: demo; Owner: -
--
CREATE SEQUENCE organisations_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: organisations_id_seq; Type: SEQUENCE OWNED BY; Schema: demo; Owner: -
--
ALTER SEQUENCE organisations_id_seq OWNED BY organisations.id;
--
-- Name: pages; Type: TABLE; Schema: demo; Owner: -
--
CREATE TABLE pages (
id integer NOT NULL,
shopping_store_id integer,
page_type character varying(255) DEFAULT 'layout'::character varying,
sections json DEFAULT '{}'::json,
name character varying(255),
section character varying(255),
created_at timestamp without time zone,
updated_at timestamp without time zone
);
--
-- Name: pages_id_seq; Type: SEQUENCE; Schema: demo; Owner: -
--
CREATE SEQUENCE pages_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: pages_id_seq; Type: SEQUENCE OWNED BY; Schema: demo; Owner: -
--
ALTER SEQUENCE pages_id_seq OWNED BY pages.id;
--
-- Name: projects; Type: TABLE; Schema: demo; Owner: -
--
CREATE TABLE projects (
id integer NOT NULL,
name character varying(255),
active boolean DEFAULT true,
date_start date,
date_end date,
description text,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: projects_id_seq; Type: SEQUENCE; Schema: demo; Owner: -
--
CREATE SEQUENCE projects_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: projects_id_seq; Type: SEQUENCE OWNED BY; Schema: demo; Owner: -
--
ALTER SEQUENCE projects_id_seq OWNED BY projects.id;
--
-- Name: schema_migrations; Type: TABLE; Schema: demo; Owner: -
--
CREATE TABLE schema_migrations (
version character varying(255) NOT NULL
);
--
-- Name: stocks; Type: TABLE; Schema: demo; Owner: -
--
CREATE TABLE stocks (
id integer NOT NULL,
store_id integer,
item_id integer,
unitary_cost numeric(14,2) DEFAULT 0.0,
quantity numeric(14,2) DEFAULT 0.0,
minimum numeric(14,2) DEFAULT 0.0,
user_id integer,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
active boolean DEFAULT true
);
--
-- Name: stocks_id_seq; Type: SEQUENCE; Schema: demo; Owner: -
--
CREATE SEQUENCE stocks_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: stocks_id_seq; Type: SEQUENCE OWNED BY; Schema: demo; Owner: -
--
ALTER SEQUENCE stocks_id_seq OWNED BY stocks.id;
--
-- Name: stores; Type: TABLE; Schema: demo; Owner: -
--
CREATE TABLE stores (
id integer NOT NULL,
name character varying(255),
address character varying(255),
phone character varying(40),
active boolean DEFAULT true,
description character varying(255),
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: stores_id_seq; Type: SEQUENCE; Schema: demo; Owner: -
--
CREATE SEQUENCE stores_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: stores_id_seq; Type: SEQUENCE OWNED BY; Schema: demo; Owner: -
--
ALTER SEQUENCE stores_id_seq OWNED BY stores.id;
--
-- Name: tag_groups; Type: TABLE; Schema: demo; Owner: -
--
CREATE TABLE tag_groups (
id integer NOT NULL,
name character varying(255),
bgcolor character varying(255),
tag_ids integer[] DEFAULT '{}'::integer[],
created_at timestamp without time zone,
updated_at timestamp without time zone
);
--
-- Name: tag_groups_id_seq; Type: SEQUENCE; Schema: demo; Owner: -
--
CREATE SEQUENCE tag_groups_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: tag_groups_id_seq; Type: SEQUENCE OWNED BY; Schema: demo; Owner: -
--
ALTER SEQUENCE tag_groups_id_seq OWNED BY tag_groups.id;
--
-- Name: tags; Type: TABLE; Schema: demo; Owner: -
--
CREATE TABLE tags (
id integer NOT NULL,
name character varying(255),
bgcolor character varying(10),
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: tags_id_seq; Type: SEQUENCE; Schema: demo; Owner: -
--
CREATE SEQUENCE tags_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: tags_id_seq; Type: SEQUENCE OWNED BY; Schema: demo; Owner: -
--
ALTER SEQUENCE tags_id_seq OWNED BY tags.id;
--
-- Name: taxes; Type: TABLE; Schema: demo; Owner: -
--
CREATE TABLE taxes (
id integer NOT NULL,
name character varying(100),
abreviation character varying(20),
percentage numeric(5,2) DEFAULT 0.0,
created_at timestamp without time zone,
updated_at timestamp without time zone
);
--
-- Name: taxes_id_seq; Type: SEQUENCE; Schema: demo; Owner: -
--
CREATE SEQUENCE taxes_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: taxes_id_seq; Type: SEQUENCE OWNED BY; Schema: demo; Owner: -
--
ALTER SEQUENCE taxes_id_seq OWNED BY taxes.id;
--
-- Name: units; Type: TABLE; Schema: demo; Owner: -
--
CREATE TABLE units (
id integer NOT NULL,
name character varying(100),
symbol character varying(20),
"integer" boolean DEFAULT false,
visible boolean DEFAULT true,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: units_id_seq; Type: SEQUENCE; Schema: demo; Owner: -
--
CREATE SEQUENCE units_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: units_id_seq; Type: SEQUENCE OWNED BY; Schema: demo; Owner: -
--
ALTER SEQUENCE units_id_seq OWNED BY units.id;
--
-- Name: users; Type: TABLE; Schema: demo; Owner: -
--
CREATE TABLE users (
id integer NOT NULL,
email character varying(255) NOT NULL,
first_name character varying(80),
last_name character varying(80),
phone character varying(40),
mobile character varying(40),
website character varying(200),
description character varying(255),
encrypted_password character varying(255),
password_salt character varying(255),
confirmation_token character varying(60),
confirmation_sent_at timestamp without time zone,
confirmed_at timestamp without time zone,
reset_password_token character varying(255),
reset_password_sent_at timestamp without time zone,
reseted_password_at timestamp without time zone,
sign_in_count integer DEFAULT 0,
last_sign_in_at timestamp without time zone,
change_default_password boolean DEFAULT false,
address character varying(255),
active boolean DEFAULT true,
auth_token character varying(255),
rol character varying(50),
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: users_id_seq; Type: SEQUENCE; Schema: demo; Owner: -
--
CREATE SEQUENCE users_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: users_id_seq; Type: SEQUENCE OWNED BY; Schema: demo; Owner: -
--
ALTER SEQUENCE users_id_seq OWNED BY users.id;
-- Schema "flor": a per-tenant copy of the same application schema. All
-- unqualified names below resolve into "flor" via this search_path.
-- NOTE(review): several varchar lengths differ from the "demo" copy
-- (phone/mobile columns) — schema drift between tenant schemas; verify intended.
SET search_path = flor, pg_catalog;
--
-- Name: account_ledgers; Type: TABLE; Schema: flor; Owner: -
--
-- Double-entry-style ledger rows: a transfer between account_id and
-- account_to_id, with approval/nullification audit columns.
CREATE TABLE account_ledgers (
id integer NOT NULL,
reference text,
currency character varying(255),
account_id integer,
account_balance numeric(14,2) DEFAULT 0.0,
account_to_id integer,
account_to_balance numeric(14,2) DEFAULT 0.0,
date date,
operation character varying(20),
amount numeric(14,2) DEFAULT 0.0,
exchange_rate numeric(14,4) DEFAULT 1.0,
creator_id integer,
approver_id integer,
approver_datetime timestamp without time zone,
nuller_id integer,
nuller_datetime timestamp without time zone,
inverse boolean DEFAULT false,
has_error boolean DEFAULT false,
error_messages character varying(255),
project_id integer,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
status character varying(50) DEFAULT 'approved'::character varying,
updater_id integer,
name character varying(255),
contact_id integer
);
--
-- Name: account_ledgers_id_seq; Type: SEQUENCE; Schema: flor; Owner: -
--
CREATE SEQUENCE account_ledgers_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: account_ledgers_id_seq; Type: SEQUENCE OWNED BY; Schema: flor; Owner: -
--
ALTER SEQUENCE account_ledgers_id_seq OWNED BY account_ledgers.id;
--
-- Name: accounts; Type: TABLE; Schema: flor; Owner: -
--
-- Accounts (STI via "type"): money amounts in numeric(14,2), per-account
-- currency and exchange rate; free-form extras in jsonb.
CREATE TABLE accounts (
id integer NOT NULL,
name character varying(255),
currency character varying(10),
exchange_rate numeric(14,4) DEFAULT 1.0,
amount numeric(14,2) DEFAULT 0.0,
type character varying(30),
contact_id integer,
project_id integer,
active boolean DEFAULT true,
description text,
date date,
state character varying(30),
has_error boolean DEFAULT false,
error_messages character varying(400),
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
tag_ids integer[] DEFAULT '{}'::integer[],
updater_id integer,
tax_percentage numeric(5,2) DEFAULT 0,
tax_id integer,
total numeric(14,2) DEFAULT 0,
tax_in_out boolean DEFAULT false,
extras jsonb,
creator_id integer,
approver_id integer,
nuller_id integer,
due_date date
);
--
-- Name: accounts_id_seq; Type: SEQUENCE; Schema: flor; Owner: -
--
CREATE SEQUENCE accounts_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: accounts_id_seq; Type: SEQUENCE OWNED BY; Schema: flor; Owner: -
--
ALTER SEQUENCE accounts_id_seq OWNED BY accounts.id;
--
-- Name: attachments; Type: TABLE; Schema: flor; Owner: -
--
-- Polymorphic file attachments (attachable_type/attachable_id); the quoted
-- "position" column avoids the reserved word.
CREATE TABLE attachments (
id integer NOT NULL,
attachment_uid character varying(255),
name character varying(255),
attachable_type character varying(255),
user_id integer,
"position" integer DEFAULT 0,
image boolean DEFAULT false,
size integer,
image_attributes json,
created_at timestamp without time zone,
updated_at timestamp without time zone,
attachable_id integer,
publish boolean DEFAULT false
);
--
-- Name: attachments_id_seq; Type: SEQUENCE; Schema: flor; Owner: -
--
CREATE SEQUENCE attachments_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: attachments_id_seq; Type: SEQUENCE OWNED BY; Schema: flor; Owner: -
--
ALTER SEQUENCE attachments_id_seq OWNED BY attachments.id;
--
-- Name: contacts; Type: TABLE; Schema: flor; Owner: -
--
-- Contacts doubling as clients/suppliers/staff (boolean role flags) with an
-- optional portal login (encrypted_password/login/active_login).
CREATE TABLE contacts (
id integer NOT NULL,
matchcode character varying(255),
first_name character varying(100),
last_name character varying(100),
organisation_name character varying(100),
address character varying(250),
phone character varying(40),
mobile character varying(40),
email character varying(200),
tax_number character varying(30),
aditional_info character varying(250),
code character varying(255),
type character varying(255),
"position" character varying(255),
active boolean DEFAULT true,
staff boolean DEFAULT false,
client boolean DEFAULT false,
supplier boolean DEFAULT false,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
-- NOTE(review): *_status columns default to the string '{}' in a varchar —
-- presumably serialized JSON stored in text; verify against application code.
incomes_status character varying(300) DEFAULT '{}'::character varying,
expenses_status character varying(300) DEFAULT '{}'::character varying,
tag_ids integer[] DEFAULT '{}'::integer[],
encrypted_password character varying(255),
password_salt character varying(255),
login boolean DEFAULT false,
active_login boolean DEFAULT true
);
--
-- Name: contacts_id_seq; Type: SEQUENCE; Schema: flor; Owner: -
--
CREATE SEQUENCE contacts_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: contacts_id_seq; Type: SEQUENCE OWNED BY; Schema: flor; Owner: -
--
ALTER SEQUENCE contacts_id_seq OWNED BY contacts.id;
--
-- Name: histories; Type: TABLE; Schema: flor; Owner: -
--
-- Audit trail of record changes (polymorphic historiable_*). "extras" must be
-- schema-qualified as public.hstore because public is not on this search_path.
CREATE TABLE histories (
id integer NOT NULL,
user_id integer,
historiable_id integer,
new_item boolean DEFAULT false,
historiable_type character varying(255),
history_data json DEFAULT '{}'::json,
created_at timestamp without time zone,
klass_type character varying(255),
extras public.hstore,
all_data json DEFAULT '{}'::json
);
--
-- Name: histories_id_seq; Type: SEQUENCE; Schema: flor; Owner: -
--
CREATE SEQUENCE histories_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: histories_id_seq; Type: SEQUENCE OWNED BY; Schema: flor; Owner: -
--
ALTER SEQUENCE histories_id_seq OWNED BY histories.id;
--
-- Name: inventories; Type: TABLE; Schema: flor; Owner: -
--
-- Inventory operations (in/out/transfer); store_to_id/transference_id used
-- for inter-store transfers.
CREATE TABLE inventories (
id integer NOT NULL,
contact_id integer,
store_id integer,
account_id integer,
date date,
ref_number character varying(255),
operation character varying(10),
description character varying(255),
total numeric(14,2) DEFAULT 0,
creator_id integer,
transference_id integer,
store_to_id integer,
project_id integer,
has_error boolean DEFAULT false,
error_messages character varying(255),
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
updater_id integer
);
--
-- Name: inventories_id_seq; Type: SEQUENCE; Schema: flor; Owner: -
--
CREATE SEQUENCE inventories_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: inventories_id_seq; Type: SEQUENCE OWNED BY; Schema: flor; Owner: -
--
ALTER SEQUENCE inventories_id_seq OWNED BY inventories.id;
--
-- Name: inventory_details; Type: TABLE; Schema: flor; Owner: -
--
-- Per-item line of an inventory operation.
CREATE TABLE inventory_details (
id integer NOT NULL,
inventory_id integer,
item_id integer,
store_id integer,
quantity numeric(14,2) DEFAULT 0.0,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: inventory_details_id_seq; Type: SEQUENCE; Schema: flor; Owner: -
--
CREATE SEQUENCE inventory_details_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: inventory_details_id_seq; Type: SEQUENCE OWNED BY; Schema: flor; Owner: -
--
ALTER SEQUENCE inventory_details_id_seq OWNED BY inventory_details.id;
--
-- Name: items; Type: TABLE; Schema: flor; Owner: -
--
-- Sellable/stockable items; unit_symbol/unit_name denormalized from units.
CREATE TABLE items (
id integer NOT NULL,
unit_id integer,
price numeric(14,2) DEFAULT 0.0,
name character varying(255),
description character varying(255),
code character varying(100),
for_sale boolean DEFAULT true,
stockable boolean DEFAULT true,
active boolean DEFAULT true,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
buy_price numeric(14,2) DEFAULT 0.0,
unit_symbol character varying(20),
unit_name character varying(255),
tag_ids integer[] DEFAULT '{}'::integer[],
updater_id integer,
creator_id integer,
publish boolean DEFAULT false,
brand character varying(255)
);
--
-- Name: items_id_seq; Type: SEQUENCE; Schema: flor; Owner: -
--
CREATE SEQUENCE items_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: items_id_seq; Type: SEQUENCE OWNED BY; Schema: flor; Owner: -
--
ALTER SEQUENCE items_id_seq OWNED BY items.id;
--
-- Name: links; Type: TABLE; Schema: flor; Owner: -
--
-- Join table: associates a user with an organisation, with per-link role/flags.
CREATE TABLE links (
id integer NOT NULL,
organisation_id integer,
user_id integer,
settings character varying(255),
creator boolean DEFAULT false,
master_account boolean DEFAULT false,
rol character varying(50),
active boolean DEFAULT true,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
tenant character varying(100)
);
--
-- Name: links_id_seq; Type: SEQUENCE; Schema: flor; Owner: -
--
CREATE SEQUENCE links_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: links_id_seq; Type: SEQUENCE OWNED BY; Schema: flor; Owner: -
--
ALTER SEQUENCE links_id_seq OWNED BY links.id;
--
-- Name: movement_details; Type: TABLE; Schema: flor; Owner: -
--
-- Line items of an account movement (quantity/price/discount per item).
CREATE TABLE movement_details (
id integer NOT NULL,
account_id integer,
item_id integer,
quantity numeric(14,2) DEFAULT 0.0,
price numeric(14,2) DEFAULT 0.0,
description character varying(255),
discount numeric(14,2) DEFAULT 0.0,
balance numeric(14,2) DEFAULT 0.0,
original_price numeric(14,2) DEFAULT 0.0,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: movement_details_id_seq; Type: SEQUENCE; Schema: flor; Owner: -
--
CREATE SEQUENCE movement_details_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: movement_details_id_seq; Type: SEQUENCE OWNED BY; Schema: flor; Owner: -
--
ALTER SEQUENCE movement_details_id_seq OWNED BY movement_details.id;
--
-- Name: organisations; Type: TABLE; Schema: flor; Owner: -
--
CREATE TABLE organisations (
id integer NOT NULL,
country_id integer,
name character varying(100),
address character varying(255),
address_alt character varying(255),
-- NOTE(review): varchar(20) here vs varchar(40) in the demo-schema copy.
phone character varying(20),
phone_alt character varying(20),
mobile character varying(20),
email character varying(255),
website character varying(255),
user_id integer,
due_date date,
preferences text,
time_zone character varying(100),
tenant character varying(50),
currency character varying(10),
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
country_code character varying(5)
);
--
-- Name: organisations_id_seq; Type: SEQUENCE; Schema: flor; Owner: -
--
CREATE SEQUENCE organisations_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: organisations_id_seq; Type: SEQUENCE OWNED BY; Schema: flor; Owner: -
--
ALTER SEQUENCE organisations_id_seq OWNED BY organisations.id;
--
-- Name: pages; Type: TABLE; Schema: flor; Owner: -
--
-- CMS-style page for a shopping store; layout sections stored as JSON.
CREATE TABLE pages (
id integer NOT NULL,
shopping_store_id integer,
page_type character varying(255) DEFAULT 'layout'::character varying,
sections json DEFAULT '{}'::json,
name character varying(255),
section character varying(255),
created_at timestamp without time zone,
updated_at timestamp without time zone
);
--
-- Name: pages_id_seq; Type: SEQUENCE; Schema: flor; Owner: -
--
CREATE SEQUENCE pages_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: pages_id_seq; Type: SEQUENCE OWNED BY; Schema: flor; Owner: -
--
ALTER SEQUENCE pages_id_seq OWNED BY pages.id;
--
-- Name: projects; Type: TABLE; Schema: flor; Owner: -
--
CREATE TABLE projects (
id integer NOT NULL,
name character varying(255),
active boolean DEFAULT true,
date_start date,
date_end date,
description text,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: projects_id_seq; Type: SEQUENCE; Schema: flor; Owner: -
--
CREATE SEQUENCE projects_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: projects_id_seq; Type: SEQUENCE OWNED BY; Schema: flor; Owner: -
--
ALTER SEQUENCE projects_id_seq OWNED BY projects.id;
--
-- Name: schema_migrations; Type: TABLE; Schema: flor; Owner: -
--
-- Rails migration bookkeeping table for this tenant schema.
CREATE TABLE schema_migrations (
version character varying(255) NOT NULL
);
--
-- Name: stocks; Type: TABLE; Schema: flor; Owner: -
--
-- Per-store stock level for an item, with unit cost and reorder minimum.
CREATE TABLE stocks (
id integer NOT NULL,
store_id integer,
item_id integer,
unitary_cost numeric(14,2) DEFAULT 0.0,
quantity numeric(14,2) DEFAULT 0.0,
minimum numeric(14,2) DEFAULT 0.0,
user_id integer,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
active boolean DEFAULT true
);
--
-- Name: stocks_id_seq; Type: SEQUENCE; Schema: flor; Owner: -
--
CREATE SEQUENCE stocks_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: stocks_id_seq; Type: SEQUENCE OWNED BY; Schema: flor; Owner: -
--
ALTER SEQUENCE stocks_id_seq OWNED BY stocks.id;
--
-- Name: stores; Type: TABLE; Schema: flor; Owner: -
--
CREATE TABLE stores (
id integer NOT NULL,
name character varying(255),
address character varying(255),
phone character varying(40),
active boolean DEFAULT true,
description character varying(255),
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: stores_id_seq; Type: SEQUENCE; Schema: flor; Owner: -
--
CREATE SEQUENCE stores_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: stores_id_seq; Type: SEQUENCE OWNED BY; Schema: flor; Owner: -
--
ALTER SEQUENCE stores_id_seq OWNED BY stores.id;
--
-- Name: tag_groups; Type: TABLE; Schema: flor; Owner: -
--
-- Named group of tags; membership denormalized into an integer[] of tag ids.
CREATE TABLE tag_groups (
id integer NOT NULL,
name character varying(255),
bgcolor character varying(255),
tag_ids integer[] DEFAULT '{}'::integer[],
created_at timestamp without time zone,
updated_at timestamp without time zone
);
--
-- Name: tag_groups_id_seq; Type: SEQUENCE; Schema: flor; Owner: -
--
CREATE SEQUENCE tag_groups_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: tag_groups_id_seq; Type: SEQUENCE OWNED BY; Schema: flor; Owner: -
--
ALTER SEQUENCE tag_groups_id_seq OWNED BY tag_groups.id;
--
-- Name: tags; Type: TABLE; Schema: flor; Owner: -
--
CREATE TABLE tags (
id integer NOT NULL,
name character varying(255),
bgcolor character varying(10),
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: tags_id_seq; Type: SEQUENCE; Schema: flor; Owner: -
--
CREATE SEQUENCE tags_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: tags_id_seq; Type: SEQUENCE OWNED BY; Schema: flor; Owner: -
--
ALTER SEQUENCE tags_id_seq OWNED BY tags.id;
--
-- Name: taxes; Type: TABLE; Schema: flor; Owner: -
--
CREATE TABLE taxes (
id integer NOT NULL,
name character varying(100),
abreviation character varying(20),
percentage numeric(5,2) DEFAULT 0.0,
created_at timestamp without time zone,
updated_at timestamp without time zone
);
--
-- Name: taxes_id_seq; Type: SEQUENCE; Schema: flor; Owner: -
--
CREATE SEQUENCE taxes_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: taxes_id_seq; Type: SEQUENCE OWNED BY; Schema: flor; Owner: -
--
ALTER SEQUENCE taxes_id_seq OWNED BY taxes.id;
--
-- Name: units; Type: TABLE; Schema: flor; Owner: -
--
-- Measurement units; quoted "integer" column avoids the reserved word.
CREATE TABLE units (
id integer NOT NULL,
name character varying(100),
symbol character varying(20),
"integer" boolean DEFAULT false,
visible boolean DEFAULT true,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: units_id_seq; Type: SEQUENCE; Schema: flor; Owner: -
--
CREATE SEQUENCE units_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: units_id_seq; Type: SEQUENCE OWNED BY; Schema: flor; Owner: -
--
ALTER SEQUENCE units_id_seq OWNED BY units.id;
--
-- Name: users; Type: TABLE; Schema: flor; Owner: -
--
-- Application users with Devise-style auth columns.
-- NOTE(review): phone/mobile are varchar(20) here vs varchar(40) in demo.users.
CREATE TABLE users (
id integer NOT NULL,
email character varying(255) NOT NULL,
first_name character varying(80),
last_name character varying(80),
phone character varying(20),
mobile character varying(20),
website character varying(200),
description character varying(255),
encrypted_password character varying(255),
password_salt character varying(255),
confirmation_token character varying(60),
confirmation_sent_at timestamp without time zone,
confirmed_at timestamp without time zone,
reset_password_token character varying(255),
reset_password_sent_at timestamp without time zone,
reseted_password_at timestamp without time zone,
sign_in_count integer DEFAULT 0,
last_sign_in_at timestamp without time zone,
change_default_password boolean DEFAULT false,
address character varying(255),
active boolean DEFAULT true,
auth_token character varying(255),
rol character varying(50),
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: users_id_seq; Type: SEQUENCE; Schema: flor; Owner: -
--
CREATE SEQUENCE users_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: users_id_seq; Type: SEQUENCE OWNED BY; Schema: flor; Owner: -
--
ALTER SEQUENCE users_id_seq OWNED BY users.id;
SET search_path = public, pg_catalog;
--
-- Name: account_ledgers; Type: TABLE; Schema: public; Owner: -
--
CREATE TABLE account_ledgers (
id integer NOT NULL,
reference text,
currency character varying(255),
account_id integer,
account_balance numeric(14,2) DEFAULT 0.0,
account_to_id integer,
account_to_balance numeric(14,2) DEFAULT 0.0,
date date,
operation character varying(20),
amount numeric(14,2) DEFAULT 0.0,
exchange_rate numeric(14,4) DEFAULT 1.0,
creator_id integer,
approver_id integer,
approver_datetime timestamp without time zone,
nuller_id integer,
nuller_datetime timestamp without time zone,
inverse boolean DEFAULT false,
has_error boolean DEFAULT false,
error_messages character varying(255),
project_id integer,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
status character varying(50) DEFAULT 'approved'::character varying,
updater_id integer,
name character varying(255),
contact_id integer
);
--
-- Name: account_ledgers_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE account_ledgers_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: account_ledgers_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE account_ledgers_id_seq OWNED BY account_ledgers.id;
--
-- Name: accounts; Type: TABLE; Schema: public; Owner: -
--
CREATE TABLE accounts (
id integer NOT NULL,
name character varying(255),
currency character varying(10),
exchange_rate numeric(14,4) DEFAULT 1.0,
amount numeric(14,2) DEFAULT 0.0,
type character varying(30),
contact_id integer,
project_id integer,
active boolean DEFAULT true,
description text,
date date,
state character varying(30),
has_error boolean DEFAULT false,
error_messages character varying(400),
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
tag_ids integer[] DEFAULT '{}'::integer[],
updater_id integer,
tax_percentage numeric(5,2) DEFAULT 0,
tax_id integer,
total numeric(14,2) DEFAULT 0,
tax_in_out boolean DEFAULT false,
extras jsonb,
creator_id integer,
approver_id integer,
nuller_id integer,
due_date date
);
--
-- Name: accounts_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE accounts_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: accounts_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE accounts_id_seq OWNED BY accounts.id;
--
-- Name: attachments; Type: TABLE; Schema: public; Owner: -
--
CREATE TABLE attachments (
id integer NOT NULL,
attachment_uid character varying(255),
name character varying(255),
attachable_type character varying(255),
user_id integer,
"position" integer DEFAULT 0,
image boolean DEFAULT false,
size integer,
image_attributes json,
created_at timestamp without time zone,
updated_at timestamp without time zone,
attachable_id integer,
publish boolean DEFAULT false
);
--
-- Name: attachments_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE attachments_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: attachments_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE attachments_id_seq OWNED BY attachments.id;
--
-- Name: contacts; Type: TABLE; Schema: public; Owner: -
--
-- People/organisations that can act as staff, client and/or supplier
-- (boolean role flags below). Rows with login=true also carry credentials
-- (encrypted_password/password_salt), i.e. contacts can authenticate.
CREATE TABLE contacts (
id integer NOT NULL,
matchcode character varying(255),
first_name character varying(100),
last_name character varying(100),
organisation_name character varying(100),
address character varying(250),
phone character varying(40),
mobile character varying(40),
email character varying(200),
tax_number character varying(30),
aditional_info character varying(250),
code character varying(255),
type character varying(255),
"position" character varying(255),
active boolean DEFAULT true,
staff boolean DEFAULT false,
client boolean DEFAULT false,
supplier boolean DEFAULT false,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
-- NOTE(review): these default to '{}' — serialized structures stored in
-- varchar(300) rather than json/jsonb; capped length may truncate. Verify.
incomes_status character varying(300) DEFAULT '{}'::character varying,
expenses_status character varying(300) DEFAULT '{}'::character varying,
tag_ids integer[] DEFAULT '{}'::integer[],
encrypted_password character varying(255),
password_salt character varying(255),
login boolean DEFAULT false,
active_login boolean DEFAULT true
);
--
-- Name: contacts_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE contacts_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: contacts_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE contacts_id_seq OWNED BY contacts.id;
--
-- Name: histories; Type: TABLE; Schema: public; Owner: -
--
-- Audit/changelog rows linked polymorphically via
-- (historiable_type, historiable_id); the change payload lives in
-- history_data / all_data (json) plus an hstore extras bag.
CREATE TABLE histories (
id integer NOT NULL,
user_id integer,
historiable_id integer,
new_item boolean DEFAULT false,
historiable_type character varying(255),
history_data json DEFAULT '{}'::json,
created_at timestamp without time zone,
klass_type character varying(255),
extras hstore,
all_data json DEFAULT '{}'::json
);
--
-- Name: histories_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE histories_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: histories_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE histories_id_seq OWNED BY histories.id;
--
-- Name: inventories; Type: TABLE; Schema: public; Owner: -
--
-- Inventory operation headers (see inventory_details for line items).
-- operation is a short code; transference_id/store_to_id suggest
-- store-to-store transfers — TODO confirm semantics against the app.
CREATE TABLE inventories (
id integer NOT NULL,
contact_id integer,
store_id integer,
account_id integer,
date date,
ref_number character varying(255),
operation character varying(10),
description character varying(255),
total numeric(14,2) DEFAULT 0,
creator_id integer,
transference_id integer,
store_to_id integer,
project_id integer,
has_error boolean DEFAULT false,
error_messages character varying(255),
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
updater_id integer
);
--
-- Name: inventories_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE inventories_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: inventories_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE inventories_id_seq OWNED BY inventories.id;
--
-- Name: inventory_details; Type: TABLE; Schema: public; Owner: -
--
-- Line items of an inventory operation: quantity of one item at one store.
CREATE TABLE inventory_details (
id integer NOT NULL,
inventory_id integer,
item_id integer,
store_id integer,
quantity numeric(14,2) DEFAULT 0.0,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: inventory_details_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE inventory_details_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: inventory_details_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE inventory_details_id_seq OWNED BY inventory_details.id;
--
-- Name: items; Type: TABLE; Schema: public; Owner: -
--
-- Product/service catalogue. unit_symbol/unit_name duplicate data from
-- units (denormalised copy alongside unit_id) — presumably a snapshot so
-- later unit edits don't rewrite items; TODO confirm.
CREATE TABLE items (
id integer NOT NULL,
unit_id integer,
-- prices use exact numeric(14,2) — correct choice for money.
price numeric(14,2) DEFAULT 0.0,
name character varying(255),
description character varying(255),
code character varying(100),
for_sale boolean DEFAULT true,
stockable boolean DEFAULT true,
active boolean DEFAULT true,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
buy_price numeric(14,2) DEFAULT 0.0,
unit_symbol character varying(20),
unit_name character varying(255),
tag_ids integer[] DEFAULT '{}'::integer[],
updater_id integer,
creator_id integer,
publish boolean DEFAULT false,
brand character varying(255)
);
--
-- Name: items_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE items_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: items_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE items_id_seq OWNED BY items.id;
--
-- Name: links; Type: TABLE; Schema: public; Owner: -
--
-- Membership join between users and organisations; stores the member's
-- role, per-org API token and the tenant (schema) name the org lives in.
CREATE TABLE links (
id integer NOT NULL,
organisation_id integer,
user_id integer,
settings character varying(255),
creator boolean DEFAULT false,
master_account boolean DEFAULT false,
role character varying(50),
active boolean DEFAULT true,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
tenant character varying(100),
api_token character varying(255)
);
--
-- Name: links_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE links_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: links_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE links_id_seq OWNED BY links.id;
--
-- Name: movement_details; Type: TABLE; Schema: public; Owner: -
--
-- Line items of an account movement (invoice/expense): item, quantity,
-- price, discount and running balance; original_price keeps the
-- pre-discount price. PK constraint later names this
-- transaction_details_pkey — presumably the table's former name.
CREATE TABLE movement_details (
id integer NOT NULL,
account_id integer,
item_id integer,
quantity numeric(14,2) DEFAULT 0.0,
price numeric(14,2) DEFAULT 0.0,
description character varying(255),
discount numeric(14,2) DEFAULT 0.0,
balance numeric(14,2) DEFAULT 0.0,
original_price numeric(14,2) DEFAULT 0.0,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: movement_details_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE movement_details_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: movement_details_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE movement_details_id_seq OWNED BY movement_details.id;
--
-- Name: organisations; Type: TABLE; Schema: public; Owner: -
--
-- Tenant organisations; tenant holds the per-org schema name.
-- NOTE(review): the table uses British spelling ("organisations") but its
-- sequence below uses American spelling ("organizations_id_seq"). The
-- OWNED BY keeps them wired together, but the mismatch is easy to trip
-- over — do not "fix" one side without updating every reference.
CREATE TABLE organisations (
id integer NOT NULL,
country_id integer,
name character varying(100),
address character varying(255),
address_alt character varying(255),
phone character varying(40),
phone_alt character varying(40),
mobile character varying(40),
email character varying(255),
website character varying(255),
user_id integer,
due_date date,
preferences text,
time_zone character varying(100),
tenant character varying(50),
currency character varying(10),
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
country_code character varying(5),
-- feature flags kept as hstore; inventory enabled by default.
settings hstore DEFAULT '"inventory"=>"true"'::hstore
);
--
-- Name: organizations_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE organizations_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: organizations_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE organizations_id_seq OWNED BY organisations.id;
--
-- Name: pages; Type: TABLE; Schema: public; Owner: -
--
-- CMS-style pages for a shopping store; layout sections stored as json.
CREATE TABLE pages (
id integer NOT NULL,
shopping_store_id integer,
page_type character varying(255) DEFAULT 'layout'::character varying,
sections json DEFAULT '{}'::json,
name character varying(255),
section character varying(255),
created_at timestamp without time zone,
updated_at timestamp without time zone
);
--
-- Name: pages_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE pages_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: pages_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE pages_id_seq OWNED BY pages.id;
--
-- Name: projects; Type: TABLE; Schema: public; Owner: -
--
-- Projects used to group accounts/inventories (see project_id elsewhere).
CREATE TABLE projects (
id integer NOT NULL,
name character varying(255),
active boolean DEFAULT true,
date_start date,
date_end date,
description text,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: projects_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE projects_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: projects_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE projects_id_seq OWNED BY projects.id;
--
-- Name: schema_migrations; Type: TABLE; Schema: public; Owner: -
--
-- Migration bookkeeping (Rails convention: one row per applied version).
-- NOTE(review): no PK/unique on version is visible in this chunk — it may
-- be added as a unique index elsewhere in the dump; verify.
CREATE TABLE schema_migrations (
version character varying(255) NOT NULL
);
--
-- Name: shopping_stores; Type: TABLE; Schema: public; Owner: -
--
-- Public storefront per organisation; configuration is a json blob.
-- No created_at/updated_at here, unlike most tables in this dump.
CREATE TABLE shopping_stores (
id integer NOT NULL,
name character varying(255),
publish boolean DEFAULT false,
active boolean DEFAULT false,
url character varying(255),
tenant character varying(255),
organisation_id integer,
configuration json DEFAULT '{}'::json
);
--
-- Name: shopping_stores_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE shopping_stores_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: shopping_stores_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE shopping_stores_id_seq OWNED BY shopping_stores.id;
--
-- Name: stocks; Type: TABLE; Schema: public; Owner: -
--
-- Current stock level of one item at one store, with unit cost and a
-- reorder minimum.
CREATE TABLE stocks (
id integer NOT NULL,
store_id integer,
item_id integer,
unitary_cost numeric(14,2) DEFAULT 0.0,
quantity numeric(14,2) DEFAULT 0.0,
minimum numeric(14,2) DEFAULT 0.0,
user_id integer,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
active boolean DEFAULT true
);
--
-- Name: stocks_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE stocks_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: stocks_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE stocks_id_seq OWNED BY stocks.id;
--
-- Name: stores; Type: TABLE; Schema: public; Owner: -
--
-- Physical stores/warehouses referenced by stocks and inventories.
CREATE TABLE stores (
id integer NOT NULL,
name character varying(255),
address character varying(255),
phone character varying(40),
active boolean DEFAULT true,
description character varying(255),
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: stores_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE stores_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: stores_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE stores_id_seq OWNED BY stores.id;
--
-- Name: tag_groups; Type: TABLE; Schema: public; Owner: -
--
-- Named groups of tags; membership kept as an integer[] of tag ids
-- (array-FK pattern — referential integrity is not enforced by the DB).
CREATE TABLE tag_groups (
id integer NOT NULL,
name character varying(255),
bgcolor character varying(255),
tag_ids integer[] DEFAULT '{}'::integer[],
created_at timestamp without time zone,
updated_at timestamp without time zone
);
--
-- Name: tag_groups_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE tag_groups_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: tag_groups_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE tag_groups_id_seq OWNED BY tag_groups.id;
--
-- Name: tags; Type: TABLE; Schema: public; Owner: -
--
-- Tags referenced (by id arrays) from accounts, contacts, items,
-- tag_groups.
CREATE TABLE tags (
id integer NOT NULL,
name character varying(255),
bgcolor character varying(10),
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: tags_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE tags_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: tags_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE tags_id_seq OWNED BY tags.id;
--
-- Name: taxes; Type: TABLE; Schema: public; Owner: -
--
-- Tax definitions; percentage as exact numeric(5,2) (e.g. 13.00).
-- "abreviation" is a misspelling kept as-is — renaming a column in a dump
-- would break the application code that reads it.
CREATE TABLE taxes (
id integer NOT NULL,
name character varying(100),
abreviation character varying(20),
percentage numeric(5,2) DEFAULT 0.0,
created_at timestamp without time zone,
updated_at timestamp without time zone
);
--
-- Name: taxes_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE taxes_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: taxes_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE taxes_id_seq OWNED BY taxes.id;
--
-- Name: units; Type: TABLE; Schema: public; Owner: -
--
-- Measurement units for items; "integer" (quoted reserved word) flags
-- whole-number-only quantities.
CREATE TABLE units (
id integer NOT NULL,
name character varying(100),
symbol character varying(20),
"integer" boolean DEFAULT false,
visible boolean DEFAULT true,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL
);
--
-- Name: units_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE units_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: units_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE units_id_seq OWNED BY units.id;
--
-- Name: users; Type: TABLE; Schema: public; Owner: -
--
-- Application accounts (Devise-style column set: confirmation, password
-- reset, sign-in tracking). email is NOT NULL; no unique constraint on it
-- is visible in this chunk — presumably enforced by an index elsewhere in
-- the dump; verify.
CREATE TABLE users (
id integer NOT NULL,
email character varying(255) NOT NULL,
first_name character varying(80),
last_name character varying(80),
phone character varying(40),
mobile character varying(40),
website character varying(200),
description character varying(255),
encrypted_password character varying(255),
password_salt character varying(255),
confirmation_token character varying(60),
confirmation_sent_at timestamp without time zone,
confirmed_at timestamp without time zone,
reset_password_token character varying(255),
reset_password_sent_at timestamp without time zone,
reseted_password_at timestamp without time zone,
sign_in_count integer DEFAULT 0,
last_sign_in_at timestamp without time zone,
change_default_password boolean DEFAULT false,
address character varying(255),
active boolean DEFAULT true,
auth_token character varying(255),
rol character varying(50),
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
-- previous addresses retained after email changes.
old_emails text[] DEFAULT '{}'::text[],
locale character varying DEFAULT 'en'::character varying
);
--
-- Name: users_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
CREATE SEQUENCE users_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
--
-- Name: users_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE users_id_seq OWNED BY users.id;
-- Per-tenant section: attach each table's id default to its sequence in
-- the biobolsas schema. The unqualified nextval(...) names resolve via the
-- search_path set here.
SET search_path = biobolsas, pg_catalog;
--
-- Name: id; Type: DEFAULT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY account_ledgers ALTER COLUMN id SET DEFAULT nextval('account_ledgers_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY accounts ALTER COLUMN id SET DEFAULT nextval('accounts_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY attachments ALTER COLUMN id SET DEFAULT nextval('attachments_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY contacts ALTER COLUMN id SET DEFAULT nextval('contacts_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY histories ALTER COLUMN id SET DEFAULT nextval('histories_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY inventories ALTER COLUMN id SET DEFAULT nextval('inventories_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY inventory_details ALTER COLUMN id SET DEFAULT nextval('inventory_details_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY items ALTER COLUMN id SET DEFAULT nextval('items_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY movement_details ALTER COLUMN id SET DEFAULT nextval('movement_details_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY pages ALTER COLUMN id SET DEFAULT nextval('pages_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY projects ALTER COLUMN id SET DEFAULT nextval('projects_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY stocks ALTER COLUMN id SET DEFAULT nextval('stocks_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY stores ALTER COLUMN id SET DEFAULT nextval('stores_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY tag_groups ALTER COLUMN id SET DEFAULT nextval('tag_groups_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY tags ALTER COLUMN id SET DEFAULT nextval('tags_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY taxes ALTER COLUMN id SET DEFAULT nextval('taxes_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: biobolsas; Owner: -
--
-- Note: no links/organisations/users defaults here — those tables live in
-- the shared "common" schema, not per-tenant.
ALTER TABLE ONLY units ALTER COLUMN id SET DEFAULT nextval('units_id_seq'::regclass);
-- Per-tenant section: id defaults for the bonsai schema (same table set as
-- biobolsas above).
SET search_path = bonsai, pg_catalog;
--
-- Name: id; Type: DEFAULT; Schema: bonsai; Owner: -
--
ALTER TABLE ONLY account_ledgers ALTER COLUMN id SET DEFAULT nextval('account_ledgers_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: bonsai; Owner: -
--
ALTER TABLE ONLY accounts ALTER COLUMN id SET DEFAULT nextval('accounts_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: bonsai; Owner: -
--
ALTER TABLE ONLY attachments ALTER COLUMN id SET DEFAULT nextval('attachments_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: bonsai; Owner: -
--
ALTER TABLE ONLY contacts ALTER COLUMN id SET DEFAULT nextval('contacts_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: bonsai; Owner: -
--
ALTER TABLE ONLY histories ALTER COLUMN id SET DEFAULT nextval('histories_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: bonsai; Owner: -
--
ALTER TABLE ONLY inventories ALTER COLUMN id SET DEFAULT nextval('inventories_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: bonsai; Owner: -
--
ALTER TABLE ONLY inventory_details ALTER COLUMN id SET DEFAULT nextval('inventory_details_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: bonsai; Owner: -
--
ALTER TABLE ONLY items ALTER COLUMN id SET DEFAULT nextval('items_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: bonsai; Owner: -
--
ALTER TABLE ONLY movement_details ALTER COLUMN id SET DEFAULT nextval('movement_details_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: bonsai; Owner: -
--
ALTER TABLE ONLY pages ALTER COLUMN id SET DEFAULT nextval('pages_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: bonsai; Owner: -
--
ALTER TABLE ONLY projects ALTER COLUMN id SET DEFAULT nextval('projects_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: bonsai; Owner: -
--
ALTER TABLE ONLY stocks ALTER COLUMN id SET DEFAULT nextval('stocks_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: bonsai; Owner: -
--
ALTER TABLE ONLY stores ALTER COLUMN id SET DEFAULT nextval('stores_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: bonsai; Owner: -
--
ALTER TABLE ONLY tag_groups ALTER COLUMN id SET DEFAULT nextval('tag_groups_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: bonsai; Owner: -
--
ALTER TABLE ONLY tags ALTER COLUMN id SET DEFAULT nextval('tags_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: bonsai; Owner: -
--
ALTER TABLE ONLY taxes ALTER COLUMN id SET DEFAULT nextval('taxes_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: bonsai; Owner: -
--
ALTER TABLE ONLY units ALTER COLUMN id SET DEFAULT nextval('units_id_seq'::regclass);
-- Shared section: id defaults for cross-tenant tables (links,
-- organisations, shopping_stores, users) in the common schema.
SET search_path = common, pg_catalog;
--
-- Name: id; Type: DEFAULT; Schema: common; Owner: -
--
ALTER TABLE ONLY links ALTER COLUMN id SET DEFAULT nextval('links_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: common; Owner: -
--
-- American-spelled sequence name backing the British-spelled table;
-- matches the public-schema definition.
ALTER TABLE ONLY organisations ALTER COLUMN id SET DEFAULT nextval('organizations_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: common; Owner: -
--
ALTER TABLE ONLY shopping_stores ALTER COLUMN id SET DEFAULT nextval('shopping_stores_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: common; Owner: -
--
ALTER TABLE ONLY users ALTER COLUMN id SET DEFAULT nextval('users_id_seq'::regclass);
-- Per-tenant section: id defaults for the demo schema. Unlike biobolsas/
-- bonsai, this schema has its own links/organisations/users.
SET search_path = demo, pg_catalog;
--
-- Name: id; Type: DEFAULT; Schema: demo; Owner: -
--
ALTER TABLE ONLY account_ledgers ALTER COLUMN id SET DEFAULT nextval('account_ledgers_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: demo; Owner: -
--
ALTER TABLE ONLY accounts ALTER COLUMN id SET DEFAULT nextval('accounts_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: demo; Owner: -
--
ALTER TABLE ONLY attachments ALTER COLUMN id SET DEFAULT nextval('attachments_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: demo; Owner: -
--
ALTER TABLE ONLY contacts ALTER COLUMN id SET DEFAULT nextval('contacts_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: demo; Owner: -
--
ALTER TABLE ONLY histories ALTER COLUMN id SET DEFAULT nextval('histories_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: demo; Owner: -
--
ALTER TABLE ONLY inventories ALTER COLUMN id SET DEFAULT nextval('inventories_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: demo; Owner: -
--
ALTER TABLE ONLY inventory_details ALTER COLUMN id SET DEFAULT nextval('inventory_details_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: demo; Owner: -
--
ALTER TABLE ONLY items ALTER COLUMN id SET DEFAULT nextval('items_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: demo; Owner: -
--
ALTER TABLE ONLY links ALTER COLUMN id SET DEFAULT nextval('links_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: demo; Owner: -
--
ALTER TABLE ONLY movement_details ALTER COLUMN id SET DEFAULT nextval('movement_details_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: demo; Owner: -
--
-- NOTE(review): British spelling 'organisations_id_seq' here, whereas the
-- common/public schemas use 'organizations_id_seq'. Presumably this
-- schema's sequence really is spelled this way — verify the demo-schema
-- CREATE SEQUENCE (outside this chunk) before touching either name.
ALTER TABLE ONLY organisations ALTER COLUMN id SET DEFAULT nextval('organisations_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: demo; Owner: -
--
ALTER TABLE ONLY pages ALTER COLUMN id SET DEFAULT nextval('pages_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: demo; Owner: -
--
ALTER TABLE ONLY projects ALTER COLUMN id SET DEFAULT nextval('projects_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: demo; Owner: -
--
ALTER TABLE ONLY stocks ALTER COLUMN id SET DEFAULT nextval('stocks_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: demo; Owner: -
--
ALTER TABLE ONLY stores ALTER COLUMN id SET DEFAULT nextval('stores_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: demo; Owner: -
--
ALTER TABLE ONLY tag_groups ALTER COLUMN id SET DEFAULT nextval('tag_groups_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: demo; Owner: -
--
ALTER TABLE ONLY tags ALTER COLUMN id SET DEFAULT nextval('tags_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: demo; Owner: -
--
ALTER TABLE ONLY taxes ALTER COLUMN id SET DEFAULT nextval('taxes_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: demo; Owner: -
--
ALTER TABLE ONLY units ALTER COLUMN id SET DEFAULT nextval('units_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: demo; Owner: -
--
ALTER TABLE ONLY users ALTER COLUMN id SET DEFAULT nextval('users_id_seq'::regclass);
-- Per-tenant section: id defaults for the flor schema (same table set as
-- demo, including its own links/organisations/users).
SET search_path = flor, pg_catalog;
--
-- Name: id; Type: DEFAULT; Schema: flor; Owner: -
--
ALTER TABLE ONLY account_ledgers ALTER COLUMN id SET DEFAULT nextval('account_ledgers_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: flor; Owner: -
--
ALTER TABLE ONLY accounts ALTER COLUMN id SET DEFAULT nextval('accounts_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: flor; Owner: -
--
ALTER TABLE ONLY attachments ALTER COLUMN id SET DEFAULT nextval('attachments_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: flor; Owner: -
--
ALTER TABLE ONLY contacts ALTER COLUMN id SET DEFAULT nextval('contacts_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: flor; Owner: -
--
ALTER TABLE ONLY histories ALTER COLUMN id SET DEFAULT nextval('histories_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: flor; Owner: -
--
ALTER TABLE ONLY inventories ALTER COLUMN id SET DEFAULT nextval('inventories_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: flor; Owner: -
--
ALTER TABLE ONLY inventory_details ALTER COLUMN id SET DEFAULT nextval('inventory_details_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: flor; Owner: -
--
ALTER TABLE ONLY items ALTER COLUMN id SET DEFAULT nextval('items_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: flor; Owner: -
--
ALTER TABLE ONLY links ALTER COLUMN id SET DEFAULT nextval('links_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: flor; Owner: -
--
ALTER TABLE ONLY movement_details ALTER COLUMN id SET DEFAULT nextval('movement_details_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: flor; Owner: -
--
-- NOTE(review): British spelling 'organisations_id_seq' here (as in demo),
-- vs 'organizations_id_seq' in common/public — verify against this
-- schema's CREATE SEQUENCE outside this chunk.
ALTER TABLE ONLY organisations ALTER COLUMN id SET DEFAULT nextval('organisations_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: flor; Owner: -
--
ALTER TABLE ONLY pages ALTER COLUMN id SET DEFAULT nextval('pages_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: flor; Owner: -
--
ALTER TABLE ONLY projects ALTER COLUMN id SET DEFAULT nextval('projects_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: flor; Owner: -
--
ALTER TABLE ONLY stocks ALTER COLUMN id SET DEFAULT nextval('stocks_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: flor; Owner: -
--
ALTER TABLE ONLY stores ALTER COLUMN id SET DEFAULT nextval('stores_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: flor; Owner: -
--
ALTER TABLE ONLY tag_groups ALTER COLUMN id SET DEFAULT nextval('tag_groups_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: flor; Owner: -
--
ALTER TABLE ONLY tags ALTER COLUMN id SET DEFAULT nextval('tags_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: flor; Owner: -
--
ALTER TABLE ONLY taxes ALTER COLUMN id SET DEFAULT nextval('taxes_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: flor; Owner: -
--
ALTER TABLE ONLY units ALTER COLUMN id SET DEFAULT nextval('units_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: flor; Owner: -
--
ALTER TABLE ONLY users ALTER COLUMN id SET DEFAULT nextval('users_id_seq'::regclass);
-- Section: id defaults for the public schema (the template table set
-- defined earlier in this dump; includes shopping_stores).
SET search_path = public, pg_catalog;
--
-- Name: id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY account_ledgers ALTER COLUMN id SET DEFAULT nextval('account_ledgers_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY accounts ALTER COLUMN id SET DEFAULT nextval('accounts_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY attachments ALTER COLUMN id SET DEFAULT nextval('attachments_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY contacts ALTER COLUMN id SET DEFAULT nextval('contacts_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY histories ALTER COLUMN id SET DEFAULT nextval('histories_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY inventories ALTER COLUMN id SET DEFAULT nextval('inventories_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY inventory_details ALTER COLUMN id SET DEFAULT nextval('inventory_details_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY items ALTER COLUMN id SET DEFAULT nextval('items_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY links ALTER COLUMN id SET DEFAULT nextval('links_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY movement_details ALTER COLUMN id SET DEFAULT nextval('movement_details_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: public; Owner: -
--
-- Matches the public-schema sequence created above as
-- organizations_id_seq (American spelling; table is British-spelled).
ALTER TABLE ONLY organisations ALTER COLUMN id SET DEFAULT nextval('organizations_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY pages ALTER COLUMN id SET DEFAULT nextval('pages_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY projects ALTER COLUMN id SET DEFAULT nextval('projects_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY shopping_stores ALTER COLUMN id SET DEFAULT nextval('shopping_stores_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY stocks ALTER COLUMN id SET DEFAULT nextval('stocks_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY stores ALTER COLUMN id SET DEFAULT nextval('stores_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY tag_groups ALTER COLUMN id SET DEFAULT nextval('tag_groups_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY tags ALTER COLUMN id SET DEFAULT nextval('tags_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY taxes ALTER COLUMN id SET DEFAULT nextval('taxes_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY units ALTER COLUMN id SET DEFAULT nextval('units_id_seq'::regclass);
--
-- Name: id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY users ALTER COLUMN id SET DEFAULT nextval('users_id_seq'::regclass);
-- Section: primary-key constraints for the biobolsas schema. Some
-- constraint names differ from their table names
-- (inventory_operations_pkey on inventories, transaction_details_pkey on
-- movement_details) — presumably kept from before those tables were
-- renamed; do not rename without checking migrations that reference them.
SET search_path = biobolsas, pg_catalog;
--
-- Name: account_ledgers_pkey; Type: CONSTRAINT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY account_ledgers
ADD CONSTRAINT account_ledgers_pkey PRIMARY KEY (id);
--
-- Name: accounts_pkey; Type: CONSTRAINT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY accounts
ADD CONSTRAINT accounts_pkey PRIMARY KEY (id);
--
-- Name: attachments_pkey; Type: CONSTRAINT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY attachments
ADD CONSTRAINT attachments_pkey PRIMARY KEY (id);
--
-- Name: contacts_pkey; Type: CONSTRAINT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY contacts
ADD CONSTRAINT contacts_pkey PRIMARY KEY (id);
--
-- Name: histories_pkey; Type: CONSTRAINT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY histories
ADD CONSTRAINT histories_pkey PRIMARY KEY (id);
--
-- Name: inventory_operation_details_pkey; Type: CONSTRAINT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY inventory_details
ADD CONSTRAINT inventory_operation_details_pkey PRIMARY KEY (id);
--
-- Name: inventory_operations_pkey; Type: CONSTRAINT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY inventories
ADD CONSTRAINT inventory_operations_pkey PRIMARY KEY (id);
--
-- Name: items_pkey; Type: CONSTRAINT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY items
ADD CONSTRAINT items_pkey PRIMARY KEY (id);
--
-- Name: pages_pkey; Type: CONSTRAINT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY pages
ADD CONSTRAINT pages_pkey PRIMARY KEY (id);
--
-- Name: projects_pkey; Type: CONSTRAINT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY projects
ADD CONSTRAINT projects_pkey PRIMARY KEY (id);
--
-- Name: stocks_pkey; Type: CONSTRAINT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY stocks
ADD CONSTRAINT stocks_pkey PRIMARY KEY (id);
--
-- Name: stores_pkey; Type: CONSTRAINT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY stores
ADD CONSTRAINT stores_pkey PRIMARY KEY (id);
--
-- Name: tag_groups_pkey; Type: CONSTRAINT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY tag_groups
ADD CONSTRAINT tag_groups_pkey PRIMARY KEY (id);
--
-- Name: tags_pkey; Type: CONSTRAINT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY tags
ADD CONSTRAINT tags_pkey PRIMARY KEY (id);
--
-- Name: taxes_pkey; Type: CONSTRAINT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY taxes
ADD CONSTRAINT taxes_pkey PRIMARY KEY (id);
--
-- Name: transaction_details_pkey; Type: CONSTRAINT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY movement_details
ADD CONSTRAINT transaction_details_pkey PRIMARY KEY (id);
--
-- Name: units_pkey; Type: CONSTRAINT; Schema: biobolsas; Owner: -
--
ALTER TABLE ONLY units
ADD CONSTRAINT units_pkey PRIMARY KEY (id);
SET search_path = bonsai, pg_catalog;
--
-- Primary keys, schema bonsai. Every table uses a surrogate `id` key.
-- NOTE(review): legacy constraint names kept for restore fidelity
-- (inventory_operation_details_pkey, inventory_operations_pkey,
--  transaction_details_pkey — see the corresponding tables).
--
ALTER TABLE ONLY account_ledgers
    ADD CONSTRAINT account_ledgers_pkey PRIMARY KEY (id);
ALTER TABLE ONLY accounts
    ADD CONSTRAINT accounts_pkey PRIMARY KEY (id);
ALTER TABLE ONLY attachments
    ADD CONSTRAINT attachments_pkey PRIMARY KEY (id);
ALTER TABLE ONLY contacts
    ADD CONSTRAINT contacts_pkey PRIMARY KEY (id);
ALTER TABLE ONLY histories
    ADD CONSTRAINT histories_pkey PRIMARY KEY (id);
ALTER TABLE ONLY inventory_details
    ADD CONSTRAINT inventory_operation_details_pkey PRIMARY KEY (id);
ALTER TABLE ONLY inventories
    ADD CONSTRAINT inventory_operations_pkey PRIMARY KEY (id);
ALTER TABLE ONLY items
    ADD CONSTRAINT items_pkey PRIMARY KEY (id);
ALTER TABLE ONLY pages
    ADD CONSTRAINT pages_pkey PRIMARY KEY (id);
ALTER TABLE ONLY projects
    ADD CONSTRAINT projects_pkey PRIMARY KEY (id);
ALTER TABLE ONLY stocks
    ADD CONSTRAINT stocks_pkey PRIMARY KEY (id);
ALTER TABLE ONLY stores
    ADD CONSTRAINT stores_pkey PRIMARY KEY (id);
ALTER TABLE ONLY tag_groups
    ADD CONSTRAINT tag_groups_pkey PRIMARY KEY (id);
ALTER TABLE ONLY tags
    ADD CONSTRAINT tags_pkey PRIMARY KEY (id);
ALTER TABLE ONLY taxes
    ADD CONSTRAINT taxes_pkey PRIMARY KEY (id);
ALTER TABLE ONLY movement_details
    ADD CONSTRAINT transaction_details_pkey PRIMARY KEY (id);
ALTER TABLE ONLY units
    ADD CONSTRAINT units_pkey PRIMARY KEY (id);
SET search_path = common, pg_catalog;
--
-- Primary keys, schema common (shared/global tables).
-- NOTE(review): constraint organizations_pkey (z) sits on table
-- organisations (s) — a spelling mismatch mirrored from the live
-- database; confirm before normalizing, renaming would break restores.
--
ALTER TABLE ONLY links
    ADD CONSTRAINT links_pkey PRIMARY KEY (id);
ALTER TABLE ONLY organisations
    ADD CONSTRAINT organizations_pkey PRIMARY KEY (id);
ALTER TABLE ONLY shopping_stores
    ADD CONSTRAINT shopping_stores_pkey PRIMARY KEY (id);
ALTER TABLE ONLY users
    ADD CONSTRAINT users_pkey PRIMARY KEY (id);
SET search_path = demo, pg_catalog;
--
-- Primary keys, schema demo. Every table uses a surrogate `id` key.
-- NOTE(review): legacy constraint names kept for restore fidelity
-- (inventory_operation_details_pkey, inventory_operations_pkey,
--  transaction_details_pkey — see the corresponding tables).
--
ALTER TABLE ONLY account_ledgers
    ADD CONSTRAINT account_ledgers_pkey PRIMARY KEY (id);
ALTER TABLE ONLY accounts
    ADD CONSTRAINT accounts_pkey PRIMARY KEY (id);
ALTER TABLE ONLY attachments
    ADD CONSTRAINT attachments_pkey PRIMARY KEY (id);
ALTER TABLE ONLY contacts
    ADD CONSTRAINT contacts_pkey PRIMARY KEY (id);
ALTER TABLE ONLY histories
    ADD CONSTRAINT histories_pkey PRIMARY KEY (id);
ALTER TABLE ONLY inventory_details
    ADD CONSTRAINT inventory_operation_details_pkey PRIMARY KEY (id);
ALTER TABLE ONLY inventories
    ADD CONSTRAINT inventory_operations_pkey PRIMARY KEY (id);
ALTER TABLE ONLY items
    ADD CONSTRAINT items_pkey PRIMARY KEY (id);
ALTER TABLE ONLY links
    ADD CONSTRAINT links_pkey PRIMARY KEY (id);
ALTER TABLE ONLY organisations
    ADD CONSTRAINT organisations_pkey PRIMARY KEY (id);
ALTER TABLE ONLY pages
    ADD CONSTRAINT pages_pkey PRIMARY KEY (id);
ALTER TABLE ONLY projects
    ADD CONSTRAINT projects_pkey PRIMARY KEY (id);
ALTER TABLE ONLY stocks
    ADD CONSTRAINT stocks_pkey PRIMARY KEY (id);
ALTER TABLE ONLY stores
    ADD CONSTRAINT stores_pkey PRIMARY KEY (id);
ALTER TABLE ONLY tag_groups
    ADD CONSTRAINT tag_groups_pkey PRIMARY KEY (id);
ALTER TABLE ONLY tags
    ADD CONSTRAINT tags_pkey PRIMARY KEY (id);
ALTER TABLE ONLY taxes
    ADD CONSTRAINT taxes_pkey PRIMARY KEY (id);
ALTER TABLE ONLY movement_details
    ADD CONSTRAINT transaction_details_pkey PRIMARY KEY (id);
ALTER TABLE ONLY units
    ADD CONSTRAINT units_pkey PRIMARY KEY (id);
ALTER TABLE ONLY users
    ADD CONSTRAINT users_pkey PRIMARY KEY (id);
SET search_path = flor, pg_catalog;
--
-- Primary keys, schema flor. Every table uses a surrogate `id` key.
-- NOTE(review): legacy constraint names kept for restore fidelity
-- (inventory_operation_details_pkey, inventory_operations_pkey,
--  transaction_details_pkey — see the corresponding tables).
--
ALTER TABLE ONLY account_ledgers
    ADD CONSTRAINT account_ledgers_pkey PRIMARY KEY (id);
ALTER TABLE ONLY accounts
    ADD CONSTRAINT accounts_pkey PRIMARY KEY (id);
ALTER TABLE ONLY attachments
    ADD CONSTRAINT attachments_pkey PRIMARY KEY (id);
ALTER TABLE ONLY contacts
    ADD CONSTRAINT contacts_pkey PRIMARY KEY (id);
ALTER TABLE ONLY histories
    ADD CONSTRAINT histories_pkey PRIMARY KEY (id);
ALTER TABLE ONLY inventory_details
    ADD CONSTRAINT inventory_operation_details_pkey PRIMARY KEY (id);
ALTER TABLE ONLY inventories
    ADD CONSTRAINT inventory_operations_pkey PRIMARY KEY (id);
ALTER TABLE ONLY items
    ADD CONSTRAINT items_pkey PRIMARY KEY (id);
ALTER TABLE ONLY links
    ADD CONSTRAINT links_pkey PRIMARY KEY (id);
ALTER TABLE ONLY organisations
    ADD CONSTRAINT organisations_pkey PRIMARY KEY (id);
ALTER TABLE ONLY pages
    ADD CONSTRAINT pages_pkey PRIMARY KEY (id);
ALTER TABLE ONLY projects
    ADD CONSTRAINT projects_pkey PRIMARY KEY (id);
ALTER TABLE ONLY stocks
    ADD CONSTRAINT stocks_pkey PRIMARY KEY (id);
ALTER TABLE ONLY stores
    ADD CONSTRAINT stores_pkey PRIMARY KEY (id);
ALTER TABLE ONLY tag_groups
    ADD CONSTRAINT tag_groups_pkey PRIMARY KEY (id);
ALTER TABLE ONLY tags
    ADD CONSTRAINT tags_pkey PRIMARY KEY (id);
ALTER TABLE ONLY taxes
    ADD CONSTRAINT taxes_pkey PRIMARY KEY (id);
ALTER TABLE ONLY movement_details
    ADD CONSTRAINT transaction_details_pkey PRIMARY KEY (id);
ALTER TABLE ONLY units
    ADD CONSTRAINT units_pkey PRIMARY KEY (id);
ALTER TABLE ONLY users
    ADD CONSTRAINT users_pkey PRIMARY KEY (id);
SET search_path = public, pg_catalog;
--
-- Primary keys, schema public. Every table uses a surrogate `id` key.
-- NOTE(review): organizations_pkey (z) sits on table organisations (s),
-- unlike demo/flor which use organisations_pkey — mirrored from the live
-- database; confirm before normalizing. Other legacy constraint names
-- (inventory_operation_details_pkey, inventory_operations_pkey,
--  transaction_details_pkey) kept for restore fidelity.
--
ALTER TABLE ONLY account_ledgers
    ADD CONSTRAINT account_ledgers_pkey PRIMARY KEY (id);
ALTER TABLE ONLY accounts
    ADD CONSTRAINT accounts_pkey PRIMARY KEY (id);
ALTER TABLE ONLY attachments
    ADD CONSTRAINT attachments_pkey PRIMARY KEY (id);
ALTER TABLE ONLY contacts
    ADD CONSTRAINT contacts_pkey PRIMARY KEY (id);
ALTER TABLE ONLY histories
    ADD CONSTRAINT histories_pkey PRIMARY KEY (id);
ALTER TABLE ONLY inventory_details
    ADD CONSTRAINT inventory_operation_details_pkey PRIMARY KEY (id);
ALTER TABLE ONLY inventories
    ADD CONSTRAINT inventory_operations_pkey PRIMARY KEY (id);
ALTER TABLE ONLY items
    ADD CONSTRAINT items_pkey PRIMARY KEY (id);
ALTER TABLE ONLY links
    ADD CONSTRAINT links_pkey PRIMARY KEY (id);
ALTER TABLE ONLY organisations
    ADD CONSTRAINT organizations_pkey PRIMARY KEY (id);
ALTER TABLE ONLY pages
    ADD CONSTRAINT pages_pkey PRIMARY KEY (id);
ALTER TABLE ONLY projects
    ADD CONSTRAINT projects_pkey PRIMARY KEY (id);
ALTER TABLE ONLY shopping_stores
    ADD CONSTRAINT shopping_stores_pkey PRIMARY KEY (id);
ALTER TABLE ONLY stocks
    ADD CONSTRAINT stocks_pkey PRIMARY KEY (id);
ALTER TABLE ONLY stores
    ADD CONSTRAINT stores_pkey PRIMARY KEY (id);
ALTER TABLE ONLY tag_groups
    ADD CONSTRAINT tag_groups_pkey PRIMARY KEY (id);
ALTER TABLE ONLY tags
    ADD CONSTRAINT tags_pkey PRIMARY KEY (id);
ALTER TABLE ONLY taxes
    ADD CONSTRAINT taxes_pkey PRIMARY KEY (id);
ALTER TABLE ONLY movement_details
    ADD CONSTRAINT transaction_details_pkey PRIMARY KEY (id);
ALTER TABLE ONLY units
    ADD CONSTRAINT units_pkey PRIMARY KEY (id);
ALTER TABLE ONLY users
    ADD CONSTRAINT users_pkey PRIMARY KEY (id);
SET search_path = biobolsas, pg_catalog;
--
-- Indexes, schema biobolsas, grouped by table.
-- GIN indexes with public.gin_trgm_ops back substring/trigram search
-- (requires the pg_trgm extension); plain GIN indexes back array
-- containment queries on *_ids columns.
--

-- account_ledgers
CREATE INDEX index_account_ledgers_on_account_id ON account_ledgers USING btree (account_id);
CREATE INDEX index_account_ledgers_on_account_to_id ON account_ledgers USING btree (account_to_id);
CREATE INDEX index_account_ledgers_on_contact_id ON account_ledgers USING btree (contact_id);
CREATE INDEX index_account_ledgers_on_currency ON account_ledgers USING btree (currency);
CREATE INDEX index_account_ledgers_on_date ON account_ledgers USING btree (date);
CREATE INDEX index_account_ledgers_on_has_error ON account_ledgers USING btree (has_error);
CREATE UNIQUE INDEX index_account_ledgers_on_name ON account_ledgers USING btree (name);
CREATE INDEX index_account_ledgers_on_operation ON account_ledgers USING btree (operation);
CREATE INDEX index_account_ledgers_on_project_id ON account_ledgers USING btree (project_id);
CREATE INDEX index_account_ledgers_on_reference ON account_ledgers USING gin (reference public.gin_trgm_ops);
CREATE INDEX index_account_ledgers_on_status ON account_ledgers USING btree (status);
CREATE INDEX index_account_ledgers_on_updater_id ON account_ledgers USING btree (updater_id);

-- accounts
CREATE INDEX index_accounts_on_active ON accounts USING btree (active);
CREATE INDEX index_accounts_on_amount ON accounts USING btree (amount);
CREATE INDEX index_accounts_on_approver_id ON accounts USING btree (approver_id);
CREATE INDEX index_accounts_on_contact_id ON accounts USING btree (contact_id);
CREATE INDEX index_accounts_on_creator_id ON accounts USING btree (creator_id);
CREATE INDEX index_accounts_on_currency ON accounts USING btree (currency);
CREATE INDEX index_accounts_on_date ON accounts USING btree (date);
CREATE INDEX index_accounts_on_description ON accounts USING gin (description public.gin_trgm_ops);
CREATE INDEX index_accounts_on_due_date ON accounts USING btree (due_date);
CREATE INDEX index_accounts_on_extras ON accounts USING gin (extras);
CREATE INDEX index_accounts_on_has_error ON accounts USING btree (has_error);
CREATE UNIQUE INDEX index_accounts_on_name ON accounts USING btree (name);
CREATE INDEX index_accounts_on_nuller_id ON accounts USING btree (nuller_id);
CREATE INDEX index_accounts_on_project_id ON accounts USING btree (project_id);
CREATE INDEX index_accounts_on_state ON accounts USING btree (state);
CREATE INDEX index_accounts_on_tag_ids ON accounts USING gin (tag_ids);
CREATE INDEX index_accounts_on_tax_id ON accounts USING btree (tax_id);
CREATE INDEX index_accounts_on_tax_in_out ON accounts USING btree (tax_in_out);
CREATE INDEX index_accounts_on_type ON accounts USING btree (type);
CREATE INDEX index_accounts_on_updater_id ON accounts USING btree (updater_id);

-- attachments (polymorphic owner: attachable_id + attachable_type)
CREATE INDEX index_attachments_on_attachable_id_and_attachable_type ON attachments USING btree (attachable_id, attachable_type);
CREATE INDEX index_attachments_on_image ON attachments USING btree (image);
CREATE INDEX index_attachments_on_publish ON attachments USING btree (publish);
CREATE INDEX index_attachments_on_user_id ON attachments USING btree (user_id);

-- contacts
CREATE INDEX index_contacts_on_active ON contacts USING btree (active);
CREATE INDEX index_contacts_on_active_login ON contacts USING btree (active_login);
CREATE INDEX index_contacts_on_client ON contacts USING btree (client);
CREATE INDEX index_contacts_on_first_name ON contacts USING btree (first_name);
CREATE INDEX index_contacts_on_last_name ON contacts USING btree (last_name);
CREATE INDEX index_contacts_on_login ON contacts USING btree (login);
CREATE INDEX index_contacts_on_matchcode ON contacts USING btree (matchcode);
CREATE INDEX index_contacts_on_staff ON contacts USING btree (staff);
CREATE INDEX index_contacts_on_supplier ON contacts USING btree (supplier);
CREATE INDEX index_contacts_on_tag_ids ON contacts USING gin (tag_ids);

-- histories (polymorphic owner: historiable_id + historiable_type)
CREATE INDEX index_histories_on_created_at ON histories USING btree (created_at);
CREATE INDEX index_histories_on_historiable_id_and_historiable_type ON histories USING btree (historiable_id, historiable_type);
CREATE INDEX index_histories_on_user_id ON histories USING btree (user_id);

-- inventories / inventory_details
-- NOTE(review): index names mix old (inventory_operations/
-- inventory_operation_details) and new (inventories/inventory_details)
-- table names — kept as dumped.
CREATE INDEX index_inventories_on_updater_id ON inventories USING btree (updater_id);
CREATE INDEX index_inventory_details_on_inventory_id ON inventory_details USING btree (inventory_id);
CREATE INDEX index_inventory_operation_details_on_item_id ON inventory_details USING btree (item_id);
CREATE INDEX index_inventory_operation_details_on_store_id ON inventory_details USING btree (store_id);
CREATE INDEX index_inventory_operations_on_account_id ON inventories USING btree (account_id);
CREATE INDEX index_inventory_operations_on_contact_id ON inventories USING btree (contact_id);
CREATE INDEX index_inventory_operations_on_date ON inventories USING btree (date);
CREATE INDEX index_inventory_operations_on_has_error ON inventories USING btree (has_error);
CREATE INDEX index_inventory_operations_on_operation ON inventories USING btree (operation);
CREATE INDEX index_inventory_operations_on_project_id ON inventories USING btree (project_id);
CREATE INDEX index_inventory_operations_on_ref_number ON inventories USING btree (ref_number);
CREATE INDEX index_inventory_operations_on_store_id ON inventories USING btree (store_id);

-- items
CREATE INDEX index_items_on_code ON items USING btree (code);
CREATE INDEX index_items_on_creator_id ON items USING btree (creator_id);
CREATE INDEX index_items_on_for_sale ON items USING btree (for_sale);
CREATE INDEX index_items_on_publish ON items USING btree (publish);
CREATE INDEX index_items_on_stockable ON items USING btree (stockable);
CREATE INDEX index_items_on_tag_ids ON items USING gin (tag_ids);
CREATE INDEX index_items_on_unit_id ON items USING btree (unit_id);
CREATE INDEX index_items_on_updater_id ON items USING btree (updater_id);

-- movement_details
CREATE INDEX index_movement_details_on_account_id ON movement_details USING btree (account_id);
CREATE INDEX index_movement_details_on_item_id ON movement_details USING btree (item_id);

-- projects
CREATE INDEX index_projects_on_active ON projects USING btree (active);

-- stocks
CREATE INDEX index_stocks_on_active ON stocks USING btree (active);
CREATE INDEX index_stocks_on_item_id ON stocks USING btree (item_id);
CREATE INDEX index_stocks_on_minimum ON stocks USING btree (minimum);
CREATE INDEX index_stocks_on_quantity ON stocks USING btree (quantity);
CREATE INDEX index_stocks_on_store_id ON stocks USING btree (store_id);
CREATE INDEX index_stocks_on_user_id ON stocks USING btree (user_id);

-- tag_groups / tags
CREATE UNIQUE INDEX index_tag_groups_on_name ON tag_groups USING btree (name);
CREATE INDEX index_tag_groups_on_tag_ids ON tag_groups USING gin (tag_ids);
CREATE INDEX index_tags_on_name ON tags USING btree (name);

-- schema_migrations (Rails migration bookkeeping)
CREATE UNIQUE INDEX unique_schema_migrations ON schema_migrations USING btree (version);
SET search_path = bonsai, pg_catalog;
--
-- Indexes, schema bonsai, grouped by table (section continues past this
-- point in the dump). GIN indexes with public.gin_trgm_ops back
-- substring/trigram search (requires the pg_trgm extension); plain GIN
-- indexes back array containment queries on *_ids columns.
--

-- account_ledgers
CREATE INDEX index_account_ledgers_on_account_id ON account_ledgers USING btree (account_id);
CREATE INDEX index_account_ledgers_on_account_to_id ON account_ledgers USING btree (account_to_id);
CREATE INDEX index_account_ledgers_on_contact_id ON account_ledgers USING btree (contact_id);
CREATE INDEX index_account_ledgers_on_currency ON account_ledgers USING btree (currency);
CREATE INDEX index_account_ledgers_on_date ON account_ledgers USING btree (date);
CREATE INDEX index_account_ledgers_on_has_error ON account_ledgers USING btree (has_error);
CREATE UNIQUE INDEX index_account_ledgers_on_name ON account_ledgers USING btree (name);
CREATE INDEX index_account_ledgers_on_operation ON account_ledgers USING btree (operation);
CREATE INDEX index_account_ledgers_on_project_id ON account_ledgers USING btree (project_id);
CREATE INDEX index_account_ledgers_on_reference ON account_ledgers USING gin (reference public.gin_trgm_ops);
CREATE INDEX index_account_ledgers_on_status ON account_ledgers USING btree (status);
CREATE INDEX index_account_ledgers_on_updater_id ON account_ledgers USING btree (updater_id);

-- accounts
CREATE INDEX index_accounts_on_active ON accounts USING btree (active);
CREATE INDEX index_accounts_on_amount ON accounts USING btree (amount);
CREATE INDEX index_accounts_on_approver_id ON accounts USING btree (approver_id);
CREATE INDEX index_accounts_on_contact_id ON accounts USING btree (contact_id);
CREATE INDEX index_accounts_on_creator_id ON accounts USING btree (creator_id);
CREATE INDEX index_accounts_on_currency ON accounts USING btree (currency);
CREATE INDEX index_accounts_on_date ON accounts USING btree (date);
CREATE INDEX index_accounts_on_description ON accounts USING gin (description public.gin_trgm_ops);
CREATE INDEX index_accounts_on_due_date ON accounts USING btree (due_date);
CREATE INDEX index_accounts_on_extras ON accounts USING gin (extras);
CREATE INDEX index_accounts_on_has_error ON accounts USING btree (has_error);
CREATE UNIQUE INDEX index_accounts_on_name ON accounts USING btree (name);
CREATE INDEX index_accounts_on_nuller_id ON accounts USING btree (nuller_id);
CREATE INDEX index_accounts_on_project_id ON accounts USING btree (project_id);
CREATE INDEX index_accounts_on_state ON accounts USING btree (state);
CREATE INDEX index_accounts_on_tag_ids ON accounts USING gin (tag_ids);
CREATE INDEX index_accounts_on_tax_id ON accounts USING btree (tax_id);
CREATE INDEX index_accounts_on_tax_in_out ON accounts USING btree (tax_in_out);
CREATE INDEX index_accounts_on_type ON accounts USING btree (type);
CREATE INDEX index_accounts_on_updater_id ON accounts USING btree (updater_id);

-- attachments (polymorphic owner: attachable_id + attachable_type)
CREATE INDEX index_attachments_on_attachable_id_and_attachable_type ON attachments USING btree (attachable_id, attachable_type);
CREATE INDEX index_attachments_on_image ON attachments USING btree (image);
CREATE INDEX index_attachments_on_publish ON attachments USING btree (publish);
CREATE INDEX index_attachments_on_user_id ON attachments USING btree (user_id);

-- contacts
CREATE INDEX index_contacts_on_active ON contacts USING btree (active);
CREATE INDEX index_contacts_on_active_login ON contacts USING btree (active_login);
CREATE INDEX index_contacts_on_client ON contacts USING btree (client);
CREATE INDEX index_contacts_on_first_name ON contacts USING btree (first_name);
CREATE INDEX index_contacts_on_last_name ON contacts USING btree (last_name);
CREATE INDEX index_contacts_on_login ON contacts USING btree (login);
CREATE INDEX index_contacts_on_matchcode ON contacts USING btree (matchcode);
CREATE INDEX index_contacts_on_staff ON contacts USING btree (staff);
CREATE INDEX index_contacts_on_supplier ON contacts USING btree (supplier);
CREATE INDEX index_contacts_on_tag_ids ON contacts USING gin (tag_ids);

-- histories
CREATE INDEX index_histories_on_created_at ON histories USING btree (created_at);
--
-- Name: index_histories_on_historiable_id_and_historiable_type; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_histories_on_historiable_id_and_historiable_type ON histories USING btree (historiable_id, historiable_type);
--
-- Name: index_histories_on_user_id; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_histories_on_user_id ON histories USING btree (user_id);
--
-- Name: index_inventories_on_updater_id; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_inventories_on_updater_id ON inventories USING btree (updater_id);
--
-- Name: index_inventory_details_on_inventory_id; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_inventory_details_on_inventory_id ON inventory_details USING btree (inventory_id);
--
-- Name: index_inventory_operation_details_on_item_id; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_inventory_operation_details_on_item_id ON inventory_details USING btree (item_id);
--
-- Name: index_inventory_operation_details_on_store_id; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_inventory_operation_details_on_store_id ON inventory_details USING btree (store_id);
--
-- Name: index_inventory_operations_on_account_id; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_inventory_operations_on_account_id ON inventories USING btree (account_id);
--
-- Name: index_inventory_operations_on_contact_id; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_inventory_operations_on_contact_id ON inventories USING btree (contact_id);
--
-- Name: index_inventory_operations_on_date; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_inventory_operations_on_date ON inventories USING btree (date);
--
-- Name: index_inventory_operations_on_has_error; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_inventory_operations_on_has_error ON inventories USING btree (has_error);
--
-- Name: index_inventory_operations_on_operation; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_inventory_operations_on_operation ON inventories USING btree (operation);
--
-- Name: index_inventory_operations_on_project_id; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_inventory_operations_on_project_id ON inventories USING btree (project_id);
--
-- Name: index_inventory_operations_on_ref_number; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_inventory_operations_on_ref_number ON inventories USING btree (ref_number);
--
-- Name: index_inventory_operations_on_store_id; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_inventory_operations_on_store_id ON inventories USING btree (store_id);
--
-- Name: index_items_on_code; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_items_on_code ON items USING btree (code);
--
-- Name: index_items_on_creator_id; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_items_on_creator_id ON items USING btree (creator_id);
--
-- Name: index_items_on_for_sale; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_items_on_for_sale ON items USING btree (for_sale);
--
-- Name: index_items_on_publish; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_items_on_publish ON items USING btree (publish);
--
-- Name: index_items_on_stockable; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_items_on_stockable ON items USING btree (stockable);
--
-- Name: index_items_on_tag_ids; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_items_on_tag_ids ON items USING gin (tag_ids);
--
-- Name: index_items_on_unit_id; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_items_on_unit_id ON items USING btree (unit_id);
--
-- Name: index_items_on_updater_id; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_items_on_updater_id ON items USING btree (updater_id);
--
-- Name: index_movement_details_on_account_id; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_movement_details_on_account_id ON movement_details USING btree (account_id);
--
-- Name: index_movement_details_on_item_id; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_movement_details_on_item_id ON movement_details USING btree (item_id);
--
-- Name: index_projects_on_active; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_projects_on_active ON projects USING btree (active);
--
-- Name: index_stocks_on_active; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_stocks_on_active ON stocks USING btree (active);
--
-- Name: index_stocks_on_item_id; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_stocks_on_item_id ON stocks USING btree (item_id);
--
-- Name: index_stocks_on_minimum; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_stocks_on_minimum ON stocks USING btree (minimum);
--
-- Name: index_stocks_on_quantity; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_stocks_on_quantity ON stocks USING btree (quantity);
--
-- Name: index_stocks_on_store_id; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_stocks_on_store_id ON stocks USING btree (store_id);
--
-- Name: index_stocks_on_user_id; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_stocks_on_user_id ON stocks USING btree (user_id);
--
-- Name: index_tag_groups_on_name; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE UNIQUE INDEX index_tag_groups_on_name ON tag_groups USING btree (name);
--
-- Name: index_tag_groups_on_tag_ids; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_tag_groups_on_tag_ids ON tag_groups USING gin (tag_ids);
--
-- Name: index_tags_on_name; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE INDEX index_tags_on_name ON tags USING btree (name);
--
-- Name: unique_schema_migrations; Type: INDEX; Schema: bonsai; Owner: -
--
CREATE UNIQUE INDEX unique_schema_migrations ON schema_migrations USING btree (version);
-- ---------------------------------------------------------------------------
-- NOTE(review): shared ("common") schema indexes. All unqualified table names
-- below resolve via the search_path set on the next line. Some index names
-- are quoted because they embed a dot ("index_common.…"); mixing quoted and
-- unquoted naming styles (and "organizations" vs "organisations" spelling)
-- is inconsistent but harmless — renaming would diverge from the live DB.
-- ---------------------------------------------------------------------------
SET search_path = common, pg_catalog;
--
-- Name: index_common.links_on_organisation_id; Type: INDEX; Schema: common; Owner: -
--
CREATE INDEX "index_common.links_on_organisation_id" ON links USING btree (organisation_id);
--
-- Name: index_common.links_on_tenant; Type: INDEX; Schema: common; Owner: -
--
CREATE INDEX "index_common.links_on_tenant" ON links USING btree (tenant);
--
-- Name: index_common.links_on_user_id; Type: INDEX; Schema: common; Owner: -
--
CREATE INDEX "index_common.links_on_user_id" ON links USING btree (user_id);
--
-- Name: index_common.organisations_on_country_id; Type: INDEX; Schema: common; Owner: -
--
CREATE INDEX "index_common.organisations_on_country_id" ON organisations USING btree (country_id);
--
-- Name: index_common.organisations_on_currency; Type: INDEX; Schema: common; Owner: -
--
CREATE INDEX "index_common.organisations_on_currency" ON organisations USING btree (currency);
--
-- Name: index_common.organisations_on_due_date; Type: INDEX; Schema: common; Owner: -
--
CREATE INDEX "index_common.organisations_on_due_date" ON organisations USING btree (due_date);
--
-- Name: index_common.organisations_on_tenant; Type: INDEX; Schema: common; Owner: -
--
CREATE UNIQUE INDEX "index_common.organisations_on_tenant" ON organisations USING btree (tenant);
--
-- Name: index_common.users_on_auth_token; Type: INDEX; Schema: common; Owner: -
--
CREATE UNIQUE INDEX "index_common.users_on_auth_token" ON users USING btree (auth_token);
--
-- Name: index_common.users_on_confirmation_token; Type: INDEX; Schema: common; Owner: -
--
CREATE UNIQUE INDEX "index_common.users_on_confirmation_token" ON users USING btree (confirmation_token);
--
-- Name: index_common.users_on_email; Type: INDEX; Schema: common; Owner: -
--
CREATE UNIQUE INDEX "index_common.users_on_email" ON users USING btree (email);
--
-- Name: index_common.users_on_first_name; Type: INDEX; Schema: common; Owner: -
--
CREATE INDEX "index_common.users_on_first_name" ON users USING btree (first_name);
--
-- Name: index_common.users_on_last_name; Type: INDEX; Schema: common; Owner: -
--
CREATE INDEX "index_common.users_on_last_name" ON users USING btree (last_name);
--
-- Name: index_links_on_api_token; Type: INDEX; Schema: common; Owner: -
--
CREATE UNIQUE INDEX index_links_on_api_token ON links USING btree (api_token);
--
-- Name: index_organizations_on_country_code; Type: INDEX; Schema: common; Owner: -
--
-- NOTE(review): "organizations" (z) here vs "organisations" (s) elsewhere.
CREATE INDEX index_organizations_on_country_code ON organisations USING btree (country_code);
--
-- Name: index_shopping_stores_on_tenant; Type: INDEX; Schema: common; Owner: -
--
CREATE UNIQUE INDEX index_shopping_stores_on_tenant ON shopping_stores USING btree (tenant);
--
-- Name: index_shopping_stores_on_url; Type: INDEX; Schema: common; Owner: -
--
CREATE UNIQUE INDEX index_shopping_stores_on_url ON shopping_stores USING btree (url);
-- ---------------------------------------------------------------------------
-- NOTE(review): "demo" tenant schema indexes — structurally a copy of the
-- bonsai tenant plus account_ledgers, links, organisations and users tables.
-- The gin (… public.gin_trgm_ops) indexes reference the pg_trgm operator
-- class installed in the public schema; restoring this dump requires the
-- pg_trgm extension to exist there.
-- ---------------------------------------------------------------------------
SET search_path = demo, pg_catalog;
--
-- Name: index_account_ledgers_on_account_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_account_ledgers_on_account_id ON account_ledgers USING btree (account_id);
--
-- Name: index_account_ledgers_on_account_to_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_account_ledgers_on_account_to_id ON account_ledgers USING btree (account_to_id);
--
-- Name: index_account_ledgers_on_contact_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_account_ledgers_on_contact_id ON account_ledgers USING btree (contact_id);
--
-- Name: index_account_ledgers_on_currency; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_account_ledgers_on_currency ON account_ledgers USING btree (currency);
--
-- Name: index_account_ledgers_on_date; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_account_ledgers_on_date ON account_ledgers USING btree (date);
--
-- Name: index_account_ledgers_on_has_error; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_account_ledgers_on_has_error ON account_ledgers USING btree (has_error);
--
-- Name: index_account_ledgers_on_name; Type: INDEX; Schema: demo; Owner: -
--
CREATE UNIQUE INDEX index_account_ledgers_on_name ON account_ledgers USING btree (name);
--
-- Name: index_account_ledgers_on_operation; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_account_ledgers_on_operation ON account_ledgers USING btree (operation);
--
-- Name: index_account_ledgers_on_project_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_account_ledgers_on_project_id ON account_ledgers USING btree (project_id);
--
-- Name: index_account_ledgers_on_reference; Type: INDEX; Schema: demo; Owner: -
--
-- Trigram GIN index (pg_trgm) — supports LIKE/ILIKE pattern search on reference.
CREATE INDEX index_account_ledgers_on_reference ON account_ledgers USING gin (reference public.gin_trgm_ops);
--
-- Name: index_account_ledgers_on_status; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_account_ledgers_on_status ON account_ledgers USING btree (status);
--
-- Name: index_account_ledgers_on_updater_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_account_ledgers_on_updater_id ON account_ledgers USING btree (updater_id);
--
-- Name: index_accounts_on_active; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_accounts_on_active ON accounts USING btree (active);
--
-- Name: index_accounts_on_amount; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_accounts_on_amount ON accounts USING btree (amount);
--
-- Name: index_accounts_on_approver_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_accounts_on_approver_id ON accounts USING btree (approver_id);
--
-- Name: index_accounts_on_contact_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_accounts_on_contact_id ON accounts USING btree (contact_id);
--
-- Name: index_accounts_on_creator_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_accounts_on_creator_id ON accounts USING btree (creator_id);
--
-- Name: index_accounts_on_currency; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_accounts_on_currency ON accounts USING btree (currency);
--
-- Name: index_accounts_on_date; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_accounts_on_date ON accounts USING btree (date);
--
-- Name: index_accounts_on_description; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_accounts_on_description ON accounts USING gin (description public.gin_trgm_ops);
--
-- Name: index_accounts_on_due_date; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_accounts_on_due_date ON accounts USING btree (due_date);
--
-- Name: index_accounts_on_extras; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_accounts_on_extras ON accounts USING gin (extras);
--
-- Name: index_accounts_on_has_error; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_accounts_on_has_error ON accounts USING btree (has_error);
--
-- Name: index_accounts_on_name; Type: INDEX; Schema: demo; Owner: -
--
CREATE UNIQUE INDEX index_accounts_on_name ON accounts USING btree (name);
--
-- Name: index_accounts_on_nuller_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_accounts_on_nuller_id ON accounts USING btree (nuller_id);
--
-- Name: index_accounts_on_project_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_accounts_on_project_id ON accounts USING btree (project_id);
--
-- Name: index_accounts_on_state; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_accounts_on_state ON accounts USING btree (state);
--
-- Name: index_accounts_on_tag_ids; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_accounts_on_tag_ids ON accounts USING gin (tag_ids);
--
-- Name: index_accounts_on_tax_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_accounts_on_tax_id ON accounts USING btree (tax_id);
--
-- Name: index_accounts_on_tax_in_out; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_accounts_on_tax_in_out ON accounts USING btree (tax_in_out);
--
-- Name: index_accounts_on_type; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_accounts_on_type ON accounts USING btree (type);
--
-- Name: index_accounts_on_updater_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_accounts_on_updater_id ON accounts USING btree (updater_id);
--
-- Name: index_attachments_on_attachable_id_and_attachable_type; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_attachments_on_attachable_id_and_attachable_type ON attachments USING btree (attachable_id, attachable_type);
--
-- Name: index_attachments_on_image; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_attachments_on_image ON attachments USING btree (image);
--
-- Name: index_attachments_on_publish; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_attachments_on_publish ON attachments USING btree (publish);
--
-- Name: index_attachments_on_user_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_attachments_on_user_id ON attachments USING btree (user_id);
--
-- Name: index_contacts_on_active; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_contacts_on_active ON contacts USING btree (active);
--
-- Name: index_contacts_on_active_login; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_contacts_on_active_login ON contacts USING btree (active_login);
--
-- Name: index_contacts_on_client; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_contacts_on_client ON contacts USING btree (client);
--
-- Name: index_contacts_on_first_name; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_contacts_on_first_name ON contacts USING btree (first_name);
--
-- Name: index_contacts_on_last_name; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_contacts_on_last_name ON contacts USING btree (last_name);
--
-- Name: index_contacts_on_login; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_contacts_on_login ON contacts USING btree (login);
--
-- Name: index_contacts_on_matchcode; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_contacts_on_matchcode ON contacts USING btree (matchcode);
--
-- Name: index_contacts_on_staff; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_contacts_on_staff ON contacts USING btree (staff);
--
-- Name: index_contacts_on_supplier; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_contacts_on_supplier ON contacts USING btree (supplier);
--
-- Name: index_contacts_on_tag_ids; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_contacts_on_tag_ids ON contacts USING gin (tag_ids);
--
-- Name: index_histories_on_created_at; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_histories_on_created_at ON histories USING btree (created_at);
--
-- Name: index_histories_on_historiable_id_and_historiable_type; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_histories_on_historiable_id_and_historiable_type ON histories USING btree (historiable_id, historiable_type);
--
-- Name: index_histories_on_user_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_histories_on_user_id ON histories USING btree (user_id);
--
-- Name: index_inventories_on_updater_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_inventories_on_updater_id ON inventories USING btree (updater_id);
--
-- Name: index_inventory_details_on_inventory_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_inventory_details_on_inventory_id ON inventory_details USING btree (inventory_id);
--
-- Name: index_inventory_operation_details_on_item_id; Type: INDEX; Schema: demo; Owner: -
--
-- NOTE(review): index names say "inventory_operation(s)…" but target
-- inventory_details / inventories — presumably a historical table rename.
CREATE INDEX index_inventory_operation_details_on_item_id ON inventory_details USING btree (item_id);
--
-- Name: index_inventory_operation_details_on_store_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_inventory_operation_details_on_store_id ON inventory_details USING btree (store_id);
--
-- Name: index_inventory_operations_on_account_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_inventory_operations_on_account_id ON inventories USING btree (account_id);
--
-- Name: index_inventory_operations_on_contact_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_inventory_operations_on_contact_id ON inventories USING btree (contact_id);
--
-- Name: index_inventory_operations_on_date; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_inventory_operations_on_date ON inventories USING btree (date);
--
-- Name: index_inventory_operations_on_has_error; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_inventory_operations_on_has_error ON inventories USING btree (has_error);
--
-- Name: index_inventory_operations_on_operation; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_inventory_operations_on_operation ON inventories USING btree (operation);
--
-- Name: index_inventory_operations_on_project_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_inventory_operations_on_project_id ON inventories USING btree (project_id);
--
-- Name: index_inventory_operations_on_ref_number; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_inventory_operations_on_ref_number ON inventories USING btree (ref_number);
--
-- Name: index_inventory_operations_on_store_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_inventory_operations_on_store_id ON inventories USING btree (store_id);
--
-- Name: index_items_on_code; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_items_on_code ON items USING btree (code);
--
-- Name: index_items_on_creator_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_items_on_creator_id ON items USING btree (creator_id);
--
-- Name: index_items_on_for_sale; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_items_on_for_sale ON items USING btree (for_sale);
--
-- Name: index_items_on_publish; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_items_on_publish ON items USING btree (publish);
--
-- Name: index_items_on_stockable; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_items_on_stockable ON items USING btree (stockable);
--
-- Name: index_items_on_tag_ids; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_items_on_tag_ids ON items USING gin (tag_ids);
--
-- Name: index_items_on_unit_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_items_on_unit_id ON items USING btree (unit_id);
--
-- Name: index_items_on_updater_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_items_on_updater_id ON items USING btree (updater_id);
--
-- Name: index_links_on_organisation_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_links_on_organisation_id ON links USING btree (organisation_id);
--
-- Name: index_links_on_tenant; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_links_on_tenant ON links USING btree (tenant);
--
-- Name: index_links_on_user_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_links_on_user_id ON links USING btree (user_id);
--
-- Name: index_movement_details_on_account_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_movement_details_on_account_id ON movement_details USING btree (account_id);
--
-- Name: index_movement_details_on_item_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_movement_details_on_item_id ON movement_details USING btree (item_id);
--
-- Name: index_organisations_on_country_code; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_organisations_on_country_code ON organisations USING btree (country_code);
--
-- Name: index_organisations_on_country_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_organisations_on_country_id ON organisations USING btree (country_id);
--
-- Name: index_organisations_on_currency; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_organisations_on_currency ON organisations USING btree (currency);
--
-- Name: index_organisations_on_due_date; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_organisations_on_due_date ON organisations USING btree (due_date);
--
-- Name: index_organisations_on_tenant; Type: INDEX; Schema: demo; Owner: -
--
CREATE UNIQUE INDEX index_organisations_on_tenant ON organisations USING btree (tenant);
--
-- Name: index_projects_on_active; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_projects_on_active ON projects USING btree (active);
--
-- Name: index_stocks_on_active; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_stocks_on_active ON stocks USING btree (active);
--
-- Name: index_stocks_on_item_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_stocks_on_item_id ON stocks USING btree (item_id);
--
-- Name: index_stocks_on_minimum; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_stocks_on_minimum ON stocks USING btree (minimum);
--
-- Name: index_stocks_on_quantity; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_stocks_on_quantity ON stocks USING btree (quantity);
--
-- Name: index_stocks_on_store_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_stocks_on_store_id ON stocks USING btree (store_id);
--
-- Name: index_stocks_on_user_id; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_stocks_on_user_id ON stocks USING btree (user_id);
--
-- Name: index_tag_groups_on_name; Type: INDEX; Schema: demo; Owner: -
--
CREATE UNIQUE INDEX index_tag_groups_on_name ON tag_groups USING btree (name);
--
-- Name: index_tag_groups_on_tag_ids; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_tag_groups_on_tag_ids ON tag_groups USING gin (tag_ids);
--
-- Name: index_tags_on_name; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_tags_on_name ON tags USING btree (name);
--
-- Name: index_users_on_auth_token; Type: INDEX; Schema: demo; Owner: -
--
CREATE UNIQUE INDEX index_users_on_auth_token ON users USING btree (auth_token);
--
-- Name: index_users_on_confirmation_token; Type: INDEX; Schema: demo; Owner: -
--
CREATE UNIQUE INDEX index_users_on_confirmation_token ON users USING btree (confirmation_token);
--
-- Name: index_users_on_email; Type: INDEX; Schema: demo; Owner: -
--
CREATE UNIQUE INDEX index_users_on_email ON users USING btree (email);
--
-- Name: index_users_on_first_name; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_users_on_first_name ON users USING btree (first_name);
--
-- Name: index_users_on_last_name; Type: INDEX; Schema: demo; Owner: -
--
CREATE INDEX index_users_on_last_name ON users USING btree (last_name);
--
-- Name: unique_schema_migrations; Type: INDEX; Schema: demo; Owner: -
--
CREATE UNIQUE INDEX unique_schema_migrations ON schema_migrations USING btree (version);
-- ---------------------------------------------------------------------------
-- NOTE(review): "flor" tenant schema indexes — same per-tenant structure as
-- the demo schema above (this section continues beyond the reviewed chunk).
-- The gin (… public.gin_trgm_ops) indexes depend on the pg_trgm extension
-- being installed in the public schema.
-- ---------------------------------------------------------------------------
SET search_path = flor, pg_catalog;
--
-- Name: index_account_ledgers_on_account_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_account_ledgers_on_account_id ON account_ledgers USING btree (account_id);
--
-- Name: index_account_ledgers_on_account_to_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_account_ledgers_on_account_to_id ON account_ledgers USING btree (account_to_id);
--
-- Name: index_account_ledgers_on_contact_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_account_ledgers_on_contact_id ON account_ledgers USING btree (contact_id);
--
-- Name: index_account_ledgers_on_currency; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_account_ledgers_on_currency ON account_ledgers USING btree (currency);
--
-- Name: index_account_ledgers_on_date; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_account_ledgers_on_date ON account_ledgers USING btree (date);
--
-- Name: index_account_ledgers_on_has_error; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_account_ledgers_on_has_error ON account_ledgers USING btree (has_error);
--
-- Name: index_account_ledgers_on_name; Type: INDEX; Schema: flor; Owner: -
--
CREATE UNIQUE INDEX index_account_ledgers_on_name ON account_ledgers USING btree (name);
--
-- Name: index_account_ledgers_on_operation; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_account_ledgers_on_operation ON account_ledgers USING btree (operation);
--
-- Name: index_account_ledgers_on_project_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_account_ledgers_on_project_id ON account_ledgers USING btree (project_id);
--
-- Name: index_account_ledgers_on_reference; Type: INDEX; Schema: flor; Owner: -
--
-- Trigram GIN index (pg_trgm) — supports LIKE/ILIKE pattern search on reference.
CREATE INDEX index_account_ledgers_on_reference ON account_ledgers USING gin (reference public.gin_trgm_ops);
--
-- Name: index_account_ledgers_on_status; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_account_ledgers_on_status ON account_ledgers USING btree (status);
--
-- Name: index_account_ledgers_on_updater_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_account_ledgers_on_updater_id ON account_ledgers USING btree (updater_id);
--
-- Name: index_accounts_on_active; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_accounts_on_active ON accounts USING btree (active);
--
-- Name: index_accounts_on_amount; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_accounts_on_amount ON accounts USING btree (amount);
--
-- Name: index_accounts_on_approver_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_accounts_on_approver_id ON accounts USING btree (approver_id);
--
-- Name: index_accounts_on_contact_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_accounts_on_contact_id ON accounts USING btree (contact_id);
--
-- Name: index_accounts_on_creator_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_accounts_on_creator_id ON accounts USING btree (creator_id);
--
-- Name: index_accounts_on_currency; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_accounts_on_currency ON accounts USING btree (currency);
--
-- Name: index_accounts_on_date; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_accounts_on_date ON accounts USING btree (date);
--
-- Name: index_accounts_on_description; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_accounts_on_description ON accounts USING gin (description public.gin_trgm_ops);
--
-- Name: index_accounts_on_due_date; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_accounts_on_due_date ON accounts USING btree (due_date);
--
-- Name: index_accounts_on_extras; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_accounts_on_extras ON accounts USING gin (extras);
--
-- Name: index_accounts_on_has_error; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_accounts_on_has_error ON accounts USING btree (has_error);
--
-- Name: index_accounts_on_name; Type: INDEX; Schema: flor; Owner: -
--
CREATE UNIQUE INDEX index_accounts_on_name ON accounts USING btree (name);
--
-- Name: index_accounts_on_nuller_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_accounts_on_nuller_id ON accounts USING btree (nuller_id);
--
-- Name: index_accounts_on_project_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_accounts_on_project_id ON accounts USING btree (project_id);
--
-- Name: index_accounts_on_state; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_accounts_on_state ON accounts USING btree (state);
--
-- Name: index_accounts_on_tag_ids; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_accounts_on_tag_ids ON accounts USING gin (tag_ids);
--
-- Name: index_accounts_on_tax_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_accounts_on_tax_id ON accounts USING btree (tax_id);
--
-- Name: index_accounts_on_tax_in_out; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_accounts_on_tax_in_out ON accounts USING btree (tax_in_out);
--
-- Name: index_accounts_on_type; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_accounts_on_type ON accounts USING btree (type);
--
-- Name: index_accounts_on_updater_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_accounts_on_updater_id ON accounts USING btree (updater_id);
--
-- Name: index_attachments_on_attachable_id_and_attachable_type; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_attachments_on_attachable_id_and_attachable_type ON attachments USING btree (attachable_id, attachable_type);
--
-- Name: index_attachments_on_image; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_attachments_on_image ON attachments USING btree (image);
--
-- Name: index_attachments_on_publish; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_attachments_on_publish ON attachments USING btree (publish);
--
-- Name: index_attachments_on_user_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_attachments_on_user_id ON attachments USING btree (user_id);
--
-- Name: index_contacts_on_active; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_contacts_on_active ON contacts USING btree (active);
--
-- Name: index_contacts_on_active_login; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_contacts_on_active_login ON contacts USING btree (active_login);
--
-- Name: index_contacts_on_client; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_contacts_on_client ON contacts USING btree (client);
--
-- Name: index_contacts_on_first_name; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_contacts_on_first_name ON contacts USING btree (first_name);
--
-- Name: index_contacts_on_last_name; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_contacts_on_last_name ON contacts USING btree (last_name);
--
-- Name: index_contacts_on_login; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_contacts_on_login ON contacts USING btree (login);
--
-- Name: index_contacts_on_matchcode; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_contacts_on_matchcode ON contacts USING btree (matchcode);
--
-- Name: index_contacts_on_staff; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_contacts_on_staff ON contacts USING btree (staff);
--
-- Name: index_contacts_on_supplier; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_contacts_on_supplier ON contacts USING btree (supplier);
--
-- Name: index_contacts_on_tag_ids; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_contacts_on_tag_ids ON contacts USING gin (tag_ids);
--
-- Name: index_histories_on_created_at; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_histories_on_created_at ON histories USING btree (created_at);
--
-- Name: index_histories_on_historiable_id_and_historiable_type; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_histories_on_historiable_id_and_historiable_type ON histories USING btree (historiable_id, historiable_type);
--
-- Name: index_histories_on_user_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_histories_on_user_id ON histories USING btree (user_id);
--
-- Name: index_inventories_on_updater_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_inventories_on_updater_id ON inventories USING btree (updater_id);
--
-- Name: index_inventory_details_on_inventory_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_inventory_details_on_inventory_id ON inventory_details USING btree (inventory_id);
--
-- Name: index_inventory_operation_details_on_item_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_inventory_operation_details_on_item_id ON inventory_details USING btree (item_id);
--
-- Name: index_inventory_operation_details_on_store_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_inventory_operation_details_on_store_id ON inventory_details USING btree (store_id);
--
-- Name: index_inventory_operations_on_account_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_inventory_operations_on_account_id ON inventories USING btree (account_id);
--
-- Name: index_inventory_operations_on_contact_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_inventory_operations_on_contact_id ON inventories USING btree (contact_id);
--
-- Name: index_inventory_operations_on_date; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_inventory_operations_on_date ON inventories USING btree (date);
--
-- Name: index_inventory_operations_on_has_error; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_inventory_operations_on_has_error ON inventories USING btree (has_error);
--
-- Name: index_inventory_operations_on_operation; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_inventory_operations_on_operation ON inventories USING btree (operation);
--
-- Name: index_inventory_operations_on_project_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_inventory_operations_on_project_id ON inventories USING btree (project_id);
--
-- Name: index_inventory_operations_on_ref_number; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_inventory_operations_on_ref_number ON inventories USING btree (ref_number);
--
-- Name: index_inventory_operations_on_store_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_inventory_operations_on_store_id ON inventories USING btree (store_id);
--
-- Name: index_items_on_code; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_items_on_code ON items USING btree (code);
--
-- Name: index_items_on_creator_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_items_on_creator_id ON items USING btree (creator_id);
--
-- Name: index_items_on_for_sale; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_items_on_for_sale ON items USING btree (for_sale);
--
-- Name: index_items_on_publish; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_items_on_publish ON items USING btree (publish);
--
-- Name: index_items_on_stockable; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_items_on_stockable ON items USING btree (stockable);
--
-- Name: index_items_on_tag_ids; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_items_on_tag_ids ON items USING gin (tag_ids);
--
-- Name: index_items_on_unit_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_items_on_unit_id ON items USING btree (unit_id);
--
-- Name: index_items_on_updater_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_items_on_updater_id ON items USING btree (updater_id);
--
-- Name: index_links_on_organisation_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_links_on_organisation_id ON links USING btree (organisation_id);
--
-- Name: index_links_on_tenant; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_links_on_tenant ON links USING btree (tenant);
--
-- Name: index_links_on_user_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_links_on_user_id ON links USING btree (user_id);
--
-- Name: index_movement_details_on_account_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_movement_details_on_account_id ON movement_details USING btree (account_id);
--
-- Name: index_movement_details_on_item_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_movement_details_on_item_id ON movement_details USING btree (item_id);
--
-- Name: index_organisations_on_country_code; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_organisations_on_country_code ON organisations USING btree (country_code);
--
-- Name: index_organisations_on_country_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_organisations_on_country_id ON organisations USING btree (country_id);
--
-- Name: index_organisations_on_currency; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_organisations_on_currency ON organisations USING btree (currency);
--
-- Name: index_organisations_on_due_date; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_organisations_on_due_date ON organisations USING btree (due_date);
--
-- Name: index_organisations_on_tenant; Type: INDEX; Schema: flor; Owner: -
--
CREATE UNIQUE INDEX index_organisations_on_tenant ON organisations USING btree (tenant);
--
-- Name: index_projects_on_active; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_projects_on_active ON projects USING btree (active);
--
-- Name: index_stocks_on_active; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_stocks_on_active ON stocks USING btree (active);
--
-- Name: index_stocks_on_item_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_stocks_on_item_id ON stocks USING btree (item_id);
--
-- Name: index_stocks_on_minimum; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_stocks_on_minimum ON stocks USING btree (minimum);
--
-- Name: index_stocks_on_quantity; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_stocks_on_quantity ON stocks USING btree (quantity);
--
-- Name: index_stocks_on_store_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_stocks_on_store_id ON stocks USING btree (store_id);
--
-- Name: index_stocks_on_user_id; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_stocks_on_user_id ON stocks USING btree (user_id);
--
-- Name: index_tag_groups_on_name; Type: INDEX; Schema: flor; Owner: -
--
CREATE UNIQUE INDEX index_tag_groups_on_name ON tag_groups USING btree (name);
--
-- Name: index_tag_groups_on_tag_ids; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_tag_groups_on_tag_ids ON tag_groups USING gin (tag_ids);
--
-- Name: index_tags_on_name; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_tags_on_name ON tags USING btree (name);
--
-- Name: index_users_on_auth_token; Type: INDEX; Schema: flor; Owner: -
--
CREATE UNIQUE INDEX index_users_on_auth_token ON users USING btree (auth_token);
--
-- Name: index_users_on_confirmation_token; Type: INDEX; Schema: flor; Owner: -
--
CREATE UNIQUE INDEX index_users_on_confirmation_token ON users USING btree (confirmation_token);
--
-- Name: index_users_on_email; Type: INDEX; Schema: flor; Owner: -
--
CREATE UNIQUE INDEX index_users_on_email ON users USING btree (email);
--
-- Name: index_users_on_first_name; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_users_on_first_name ON users USING btree (first_name);
--
-- Name: index_users_on_last_name; Type: INDEX; Schema: flor; Owner: -
--
CREATE INDEX index_users_on_last_name ON users USING btree (last_name);
--
-- Name: unique_schema_migrations; Type: INDEX; Schema: flor; Owner: -
--
CREATE UNIQUE INDEX unique_schema_migrations ON schema_migrations USING btree (version);
-- Switch from the tenant schema to 'public'; the index set below mirrors the
-- 'flor' section above (plus account_ledgers, shopping_stores and the
-- links.api_token index, which exist only here).
SET search_path = public, pg_catalog;
--
-- Name: index_account_ledgers_on_account_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_account_ledgers_on_account_id ON account_ledgers USING btree (account_id);
--
-- Name: index_account_ledgers_on_account_to_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_account_ledgers_on_account_to_id ON account_ledgers USING btree (account_to_id);
--
-- Name: index_account_ledgers_on_contact_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_account_ledgers_on_contact_id ON account_ledgers USING btree (contact_id);
--
-- Name: index_account_ledgers_on_currency; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_account_ledgers_on_currency ON account_ledgers USING btree (currency);
--
-- Name: index_account_ledgers_on_date; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_account_ledgers_on_date ON account_ledgers USING btree (date);
--
-- Name: index_account_ledgers_on_has_error; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_account_ledgers_on_has_error ON account_ledgers USING btree (has_error);
--
-- Name: index_account_ledgers_on_name; Type: INDEX; Schema: public; Owner: -
--
CREATE UNIQUE INDEX index_account_ledgers_on_name ON account_ledgers USING btree (name);
--
-- Name: index_account_ledgers_on_operation; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_account_ledgers_on_operation ON account_ledgers USING btree (operation);
--
-- Name: index_account_ledgers_on_project_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_account_ledgers_on_project_id ON account_ledgers USING btree (project_id);
--
-- Name: index_account_ledgers_on_reference; Type: INDEX; Schema: public; Owner: -
--
-- trigram GIN index: requires the pg_trgm extension; supports LIKE/ILIKE search
CREATE INDEX index_account_ledgers_on_reference ON account_ledgers USING gin (reference gin_trgm_ops);
--
-- Name: index_account_ledgers_on_status; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_account_ledgers_on_status ON account_ledgers USING btree (status);
--
-- Name: index_account_ledgers_on_updater_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_account_ledgers_on_updater_id ON account_ledgers USING btree (updater_id);
--
-- Name: index_accounts_on_active; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_accounts_on_active ON accounts USING btree (active);
--
-- Name: index_accounts_on_amount; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_accounts_on_amount ON accounts USING btree (amount);
--
-- Name: index_accounts_on_approver_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_accounts_on_approver_id ON accounts USING btree (approver_id);
--
-- Name: index_accounts_on_contact_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_accounts_on_contact_id ON accounts USING btree (contact_id);
--
-- Name: index_accounts_on_creator_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_accounts_on_creator_id ON accounts USING btree (creator_id);
--
-- Name: index_accounts_on_currency; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_accounts_on_currency ON accounts USING btree (currency);
--
-- Name: index_accounts_on_date; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_accounts_on_date ON accounts USING btree (date);
--
-- Name: index_accounts_on_description; Type: INDEX; Schema: public; Owner: -
--
-- trigram GIN index: requires the pg_trgm extension
CREATE INDEX index_accounts_on_description ON accounts USING gin (description gin_trgm_ops);
--
-- Name: index_accounts_on_due_date; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_accounts_on_due_date ON accounts USING btree (due_date);
--
-- Name: index_accounts_on_extras; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_accounts_on_extras ON accounts USING gin (extras);
--
-- Name: index_accounts_on_has_error; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_accounts_on_has_error ON accounts USING btree (has_error);
--
-- Name: index_accounts_on_name; Type: INDEX; Schema: public; Owner: -
--
CREATE UNIQUE INDEX index_accounts_on_name ON accounts USING btree (name);
--
-- Name: index_accounts_on_nuller_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_accounts_on_nuller_id ON accounts USING btree (nuller_id);
--
-- Name: index_accounts_on_project_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_accounts_on_project_id ON accounts USING btree (project_id);
--
-- Name: index_accounts_on_state; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_accounts_on_state ON accounts USING btree (state);
--
-- Name: index_accounts_on_tag_ids; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_accounts_on_tag_ids ON accounts USING gin (tag_ids);
--
-- Name: index_accounts_on_tax_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_accounts_on_tax_id ON accounts USING btree (tax_id);
--
-- Name: index_accounts_on_tax_in_out; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_accounts_on_tax_in_out ON accounts USING btree (tax_in_out);
--
-- Name: index_accounts_on_type; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_accounts_on_type ON accounts USING btree (type);
--
-- Name: index_accounts_on_updater_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_accounts_on_updater_id ON accounts USING btree (updater_id);
--
-- Name: index_attachments_on_attachable_id_and_attachable_type; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_attachments_on_attachable_id_and_attachable_type ON attachments USING btree (attachable_id, attachable_type);
--
-- Name: index_attachments_on_image; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_attachments_on_image ON attachments USING btree (image);
--
-- Name: index_attachments_on_publish; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_attachments_on_publish ON attachments USING btree (publish);
--
-- Name: index_attachments_on_user_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_attachments_on_user_id ON attachments USING btree (user_id);
--
-- Name: index_contacts_on_active; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_contacts_on_active ON contacts USING btree (active);
--
-- Name: index_contacts_on_active_login; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_contacts_on_active_login ON contacts USING btree (active_login);
--
-- Name: index_contacts_on_client; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_contacts_on_client ON contacts USING btree (client);
--
-- Name: index_contacts_on_first_name; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_contacts_on_first_name ON contacts USING btree (first_name);
--
-- Name: index_contacts_on_last_name; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_contacts_on_last_name ON contacts USING btree (last_name);
--
-- Name: index_contacts_on_login; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_contacts_on_login ON contacts USING btree (login);
--
-- Name: index_contacts_on_matchcode; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_contacts_on_matchcode ON contacts USING btree (matchcode);
--
-- Name: index_contacts_on_staff; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_contacts_on_staff ON contacts USING btree (staff);
--
-- Name: index_contacts_on_supplier; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_contacts_on_supplier ON contacts USING btree (supplier);
--
-- Name: index_contacts_on_tag_ids; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_contacts_on_tag_ids ON contacts USING gin (tag_ids);
--
-- Name: index_histories_on_created_at; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_histories_on_created_at ON histories USING btree (created_at);
--
-- Name: index_histories_on_historiable_id_and_historiable_type; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_histories_on_historiable_id_and_historiable_type ON histories USING btree (historiable_id, historiable_type);
--
-- Name: index_histories_on_user_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_histories_on_user_id ON histories USING btree (user_id);
--
-- Name: index_inventories_on_updater_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_inventories_on_updater_id ON inventories USING btree (updater_id);
--
-- Name: index_inventory_details_on_inventory_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_inventory_details_on_inventory_id ON inventory_details USING btree (inventory_id);
--
-- NOTE(review): as in the 'flor' section, the "inventory_operation_*" index
-- names below predate a presumed rename of their tables to
-- inventory_details/inventories — confirm this drift is intentional.
-- Name: index_inventory_operation_details_on_item_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_inventory_operation_details_on_item_id ON inventory_details USING btree (item_id);
--
-- Name: index_inventory_operation_details_on_store_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_inventory_operation_details_on_store_id ON inventory_details USING btree (store_id);
--
-- Name: index_inventory_operations_on_account_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_inventory_operations_on_account_id ON inventories USING btree (account_id);
--
-- Name: index_inventory_operations_on_contact_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_inventory_operations_on_contact_id ON inventories USING btree (contact_id);
--
-- Name: index_inventory_operations_on_date; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_inventory_operations_on_date ON inventories USING btree (date);
--
-- Name: index_inventory_operations_on_has_error; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_inventory_operations_on_has_error ON inventories USING btree (has_error);
--
-- Name: index_inventory_operations_on_operation; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_inventory_operations_on_operation ON inventories USING btree (operation);
--
-- Name: index_inventory_operations_on_project_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_inventory_operations_on_project_id ON inventories USING btree (project_id);
--
-- Name: index_inventory_operations_on_ref_number; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_inventory_operations_on_ref_number ON inventories USING btree (ref_number);
--
-- Name: index_inventory_operations_on_store_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_inventory_operations_on_store_id ON inventories USING btree (store_id);
--
-- Name: index_items_on_code; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_items_on_code ON items USING btree (code);
--
-- Name: index_items_on_creator_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_items_on_creator_id ON items USING btree (creator_id);
--
-- Name: index_items_on_for_sale; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_items_on_for_sale ON items USING btree (for_sale);
--
-- Name: index_items_on_publish; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_items_on_publish ON items USING btree (publish);
--
-- Name: index_items_on_stockable; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_items_on_stockable ON items USING btree (stockable);
--
-- Name: index_items_on_tag_ids; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_items_on_tag_ids ON items USING gin (tag_ids);
--
-- Name: index_items_on_unit_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_items_on_unit_id ON items USING btree (unit_id);
--
-- Name: index_items_on_updater_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_items_on_updater_id ON items USING btree (updater_id);
--
-- Name: index_links_on_api_token; Type: INDEX; Schema: public; Owner: -
--
CREATE UNIQUE INDEX index_links_on_api_token ON links USING btree (api_token);
--
-- Name: index_links_on_organisation_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_links_on_organisation_id ON links USING btree (organisation_id);
--
-- Name: index_links_on_tenant; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_links_on_tenant ON links USING btree (tenant);
--
-- Name: index_links_on_user_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_links_on_user_id ON links USING btree (user_id);
--
-- Name: index_movement_details_on_account_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_movement_details_on_account_id ON movement_details USING btree (account_id);
--
-- Name: index_movement_details_on_item_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_movement_details_on_item_id ON movement_details USING btree (item_id);
--
-- NOTE(review): the five indexes below use the American spelling
-- "organizations" while the table (and the 'flor' counterparts) use
-- "organisations" — spelling drift, not a different table.
-- Name: index_organizations_on_country_code; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_organizations_on_country_code ON organisations USING btree (country_code);
--
-- Name: index_organizations_on_country_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_organizations_on_country_id ON organisations USING btree (country_id);
--
-- Name: index_organizations_on_currency; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_organizations_on_currency ON organisations USING btree (currency);
--
-- Name: index_organizations_on_due_date; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_organizations_on_due_date ON organisations USING btree (due_date);
--
-- Name: index_organizations_on_tenant; Type: INDEX; Schema: public; Owner: -
--
CREATE UNIQUE INDEX index_organizations_on_tenant ON organisations USING btree (tenant);
--
-- Name: index_projects_on_active; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_projects_on_active ON projects USING btree (active);
--
-- Name: index_shopping_stores_on_tenant; Type: INDEX; Schema: public; Owner: -
--
CREATE UNIQUE INDEX index_shopping_stores_on_tenant ON shopping_stores USING btree (tenant);
--
-- Name: index_shopping_stores_on_url; Type: INDEX; Schema: public; Owner: -
--
CREATE UNIQUE INDEX index_shopping_stores_on_url ON shopping_stores USING btree (url);
--
-- Name: index_stocks_on_active; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_stocks_on_active ON stocks USING btree (active);
--
-- Name: index_stocks_on_item_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_stocks_on_item_id ON stocks USING btree (item_id);
--
-- Name: index_stocks_on_minimum; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_stocks_on_minimum ON stocks USING btree (minimum);
--
-- Name: index_stocks_on_quantity; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_stocks_on_quantity ON stocks USING btree (quantity);
--
-- Name: index_stocks_on_store_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_stocks_on_store_id ON stocks USING btree (store_id);
--
-- Name: index_stocks_on_user_id; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_stocks_on_user_id ON stocks USING btree (user_id);
--
-- Name: index_tag_groups_on_name; Type: INDEX; Schema: public; Owner: -
--
CREATE UNIQUE INDEX index_tag_groups_on_name ON tag_groups USING btree (name);
--
-- Name: index_tag_groups_on_tag_ids; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_tag_groups_on_tag_ids ON tag_groups USING gin (tag_ids);
--
-- Name: index_tags_on_name; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_tags_on_name ON tags USING btree (name);
--
-- Name: index_users_on_auth_token; Type: INDEX; Schema: public; Owner: -
--
CREATE UNIQUE INDEX index_users_on_auth_token ON users USING btree (auth_token);
--
-- Name: index_users_on_confirmation_token; Type: INDEX; Schema: public; Owner: -
--
CREATE UNIQUE INDEX index_users_on_confirmation_token ON users USING btree (confirmation_token);
--
-- Name: index_users_on_email; Type: INDEX; Schema: public; Owner: -
--
CREATE UNIQUE INDEX index_users_on_email ON users USING btree (email);
--
-- Name: index_users_on_first_name; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_users_on_first_name ON users USING btree (first_name);
--
-- Name: index_users_on_last_name; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX index_users_on_last_name ON users USING btree (last_name);
--
-- Name: unique_schema_migrations; Type: INDEX; Schema: public; Owner: -
--
CREATE UNIQUE INDEX unique_schema_migrations ON schema_migrations USING btree (version);
--
-- PostgreSQL database dump complete
--
SET search_path TO public;
-- Seed the Rails migration bookkeeping table: mark every historical migration
-- as already applied. A single multi-row VALUES list inserts the same 85
-- version rows as the original one-statement-per-row dump, in the same order.
INSERT INTO public.schema_migrations (version) VALUES
('20100101101010'),
('20100324202441'),
('20100325221629'),
('20100401192000'),
('20100416193705'),
('20100421174307'),
('20100427190727'),
('20100531141109'),
('20110119140408'),
('20110201153434'),
('20110201161907'),
('20110411174426'),
('20110411182005'),
('20110411182905'),
('20111103143524'),
('20121215153208'),
('20130114144400'),
('20130114164401'),
('20130115020409'),
('20130204171801'),
('20130221151829'),
('20130325155351'),
('20130411141221'),
('20130426151609'),
('20130429120114'),
('20130510144731'),
('20130510222719'),
('20130522125737'),
('20130527202406'),
('20130618172158'),
('20130618184031'),
('20130702144114'),
('20130704130428'),
('20130715185912'),
('20130716131229'),
('20130716131801'),
('20130717190543'),
('20130911005608'),
('20131009131456'),
('20131009141203'),
('20131211134555'),
('20131221130149'),
('20131223155017'),
('20131224080216'),
('20131224080916'),
('20131224081504'),
('20131227025934'),
('20131227032328'),
('20131229164735'),
('20140105165519'),
('20140118184207'),
('20140127023427'),
('20140127025407'),
('20140129135140'),
('20140131140212'),
('20140205123754'),
('20140213135130'),
('20140215130814'),
('20140217120803'),
('20140217134723'),
('20140219170720'),
('20140219210139'),
('20140219210551'),
('20140227163833'),
('20140417145820'),
('20140423120216'),
('20140603135208'),
('20140704132611'),
('20140730171947'),
('20140828122720'),
('20140925003650'),
('20141002222739'),
('20141003120149'),
('20141003120627'),
('20141003183936'),
('20141009125447'),
('20141028104251'),
('20141031122615'),
('20141112132422'),
('20160211130733'),
('20160215132803'),
('20160215133105'),
('20160215135420'),
('20160531122636'),
('20160602111033');
-- 1. Register the new MedDRA release: stamp vocabulary.latest_update with the
--    release date/version taken from the source tables.
DO $meddra$
BEGIN
	PERFORM vocabulary_pack.SetLatestUpdate(
		pVocabularyName      => 'MedDRA',
		pVocabularyDate      => (SELECT vocabulary_date FROM sources.hlt_pref_comp LIMIT 1),
		pVocabularyVersion   => (SELECT vocabulary_version FROM sources.hlt_pref_comp LIMIT 1),
		pVocabularyDevSchema => 'DEV_MEDDRA'
	);
END $meddra$;
-- 2. Start from a clean slate: empty every staging table (one statement,
--    same effect as truncating each table individually).
TRUNCATE TABLE concept_stage,
	concept_relationship_stage,
	concept_synonym_stage,
	pack_content_stage,
	drug_strength_stage;
--3. Insert into concept_stage
--   One classification ('C') concept per MedDRA term; one UNION ALL branch per
--   hierarchy level (SOC > HLGT > HLT > PT > LLT).  domain_id is left NULL here
--   and resolved in step 4.  valid_start_date is the release date registered in
--   step 1 (vocabulary.latest_update); valid_end_date is the open-ended 2099-12-31.
INSERT INTO concept_stage (
	concept_name,
	vocabulary_id,
	domain_id,
	concept_class_id,
	standard_concept,
	concept_code,
	valid_start_date,
	valid_end_date,
	invalid_reason
	)
-- System Organ Class (top level)
SELECT soc_name AS concept_name,
	'MedDRA' AS vocabulary_id,
	NULL AS domain_id,
	'SOC' AS concept_class_id,
	'C' AS standard_concept,
	soc_code AS concept_code,
	(
		SELECT latest_update
		FROM vocabulary
		WHERE vocabulary_id = 'MedDRA'
		),
	TO_DATE('20991231', 'yyyymmdd'),
	NULL
FROM SOURCES.soc_term
UNION ALL
-- High Level Group Term
SELECT hlgt_name AS concept_name,
	'MedDRA' AS vocabulary_id,
	NULL AS domain_id,
	'HLGT' AS concept_class_id,
	'C' AS standard_concept,
	hlgt_code AS concept_code,
	(
		SELECT latest_update
		FROM vocabulary
		WHERE vocabulary_id = 'MedDRA'
		),
	TO_DATE('20991231', 'yyyymmdd'),
	NULL
FROM SOURCES.hlgt_pref_term
UNION ALL
-- High Level Term
SELECT hlt_name AS concept_name,
	'MedDRA' AS vocabulary_id,
	NULL AS domain_id,
	'HLT' AS concept_class_id,
	'C' AS standard_concept,
	hlt_code AS concept_code,
	(
		SELECT latest_update
		FROM vocabulary
		WHERE vocabulary_id = 'MedDRA'
		),
	TO_DATE('20991231', 'yyyymmdd'),
	NULL
FROM SOURCES.hlt_pref_term
UNION ALL
-- Preferred Term
SELECT pt_name AS concept_name,
	'MedDRA' AS vocabulary_id,
	NULL AS domain_id,
	'PT' AS concept_class_id,
	'C' AS standard_concept,
	pt_code AS concept_code,
	(
		SELECT latest_update
		FROM vocabulary
		WHERE vocabulary_id = 'MedDRA'
		),
	TO_DATE('20991231', 'yyyymmdd'),
	NULL
FROM SOURCES.pref_term
UNION ALL
-- Lowest Level Term: only current LLTs, and skip LLTs that are identical to
-- their own PT (llt_code = pt_code rows duplicate the PT branch above)
SELECT llt_name AS concept_name,
	'MedDRA' AS vocabulary_id,
	NULL AS domain_id,
	'LLT' AS concept_class_id,
	'C' AS standard_concept,
	llt_code AS concept_code,
	(
		SELECT latest_update
		FROM vocabulary
		WHERE vocabulary_id = 'MedDRA'
		),
	TO_DATE('20991231', 'yyyymmdd'),
	NULL
FROM SOURCES.low_level_term
WHERE llt_currency = 'Y'
	AND llt_code <> pt_code;
--4. Update domain_id
--   Domain is decided by a CASE that walks the hierarchy from the most specific
--   name that matches (PT, then HLT, then HLGT, then SOC); anything unmatched
--   falls through to 'Undefined'.  Each hierarchy level gets its own branch of
--   the UNION, consulting only the name columns at or above that level.  Only
--   the primary SOC assignment (primary_soc_fg = 'Y') is used, and plain UNION
--   (with dedup) keeps one row per code.
WITH t_domains
AS (
	--LLT level: LLTs are attached via their parent PT
	SELECT llt_code AS concept_code,
		CASE
			--pt level
			WHEN pt_name ~* 'monitoring|centesis|imaging|screen'
				THEN 'Procedure'
			--hlt level
			WHEN hlt_name ~* 'exposures|Physical examination procedures and organ system status'
				THEN 'Observation'
			WHEN hlt_name ~* 'histopathology|imaging|procedure'
				THEN 'Procedure'
			WHEN hlt_name = 'Gene mutations and other alterations NEC'
				THEN 'Measurement'
			--hlgt level
			WHEN hlgt_name = 'Therapeutic and nontherapeutic effects (excl toxicity)'
				THEN 'Observation'
			--soc level
			WHEN soc_name ~ 'disorders|Infections|Neoplasms|Injury, poisoning and procedural complications|Pregnancy, puerperium and perinatal conditions'
				THEN 'Condition'
			WHEN soc_name ~ 'Surgical and medical procedures'
				THEN 'Procedure'
			WHEN soc_name IN (
					'Product issues',
					'Social circumstances'
					)
				THEN 'Observation'
			WHEN soc_name = 'Investigations'
				THEN 'Measurement'
			ELSE 'Undefined'
			END AS domain_id
	FROM SOURCES.md_hierarchy h
	JOIN SOURCES.low_level_term l ON l.pt_code = h.pt_code
		AND llt_currency = 'Y'
	WHERE primary_soc_fg = 'Y'
	UNION
	-- pt level
	SELECT pt_code AS concept_code,
		CASE
			--pt level
			WHEN pt_name ~* 'monitoring|centesis|imaging|screen'
				THEN 'Procedure'
			--hlt level
			WHEN hlt_name ~* 'exposures|Physical examination procedures and organ system status'
				THEN 'Observation'
			WHEN hlt_name ~* 'histopathology|imaging|procedure'
				THEN 'Procedure'
			WHEN hlt_name = 'Gene mutations and other alterations NEC'
				THEN 'Measurement'
			--hlgt level
			WHEN hlgt_name = 'Therapeutic and nontherapeutic effects (excl toxicity)'
				THEN 'Observation'
			--soc level
			WHEN soc_name ~ 'disorders|Infections|Neoplasms|Injury, poisoning and procedural complications|Pregnancy, puerperium and perinatal conditions'
				THEN 'Condition'
			WHEN soc_name ~ 'Surgical and medical procedures'
				THEN 'Procedure'
			WHEN soc_name IN (
					'Product issues',
					'Social circumstances'
					)
				THEN 'Observation'
			WHEN soc_name = 'Investigations'
				THEN 'Measurement'
			ELSE 'Undefined'
			END AS domain_id
	FROM SOURCES.md_hierarchy h
	WHERE primary_soc_fg = 'Y'
	UNION
	--hlt level: PT-specific rules no longer apply
	SELECT hlt_code AS concept_code,
		CASE
			--hlt level
			WHEN hlt_name ~* 'exposures|Physical examination procedures and organ system status'
				THEN 'Observation'
			WHEN hlt_name ~* 'histopathology|imaging|procedure'
				THEN 'Procedure'
			WHEN hlt_name = 'Gene mutations and other alterations NEC'
				THEN 'Measurement'
			--hlgt level
			WHEN hlgt_name = 'Therapeutic and nontherapeutic effects (excl toxicity)'
				THEN 'Observation'
			--soc level
			WHEN soc_name ~ 'disorders|Infections|Neoplasms|Injury, poisoning and procedural complications|Pregnancy, puerperium and perinatal conditions'
				THEN 'Condition'
			WHEN soc_name ~ 'Surgical and medical procedures'
				THEN 'Procedure'
			WHEN soc_name IN (
					'Product issues',
					'Social circumstances'
					)
				THEN 'Observation'
			WHEN soc_name = 'Investigations'
				THEN 'Measurement'
			ELSE 'Undefined'
			END AS domain_id
	FROM SOURCES.md_hierarchy h
	WHERE primary_soc_fg = 'Y'
	UNION
	--hlgt level: only HLGT- and SOC-name rules remain
	SELECT hlgt_code AS concept_code,
		CASE
			--hlgt level
			WHEN hlgt_name = 'Therapeutic and nontherapeutic effects (excl toxicity)'
				THEN 'Observation'
			--soc level
			WHEN soc_name ~ 'disorders|Infections|Neoplasms|Injury, poisoning and procedural complications|Pregnancy, puerperium and perinatal conditions'
				THEN 'Condition'
			WHEN soc_name ~ 'Surgical and medical procedures'
				THEN 'Procedure'
			WHEN soc_name IN (
					'Product issues',
					'Social circumstances'
					)
				THEN 'Observation'
			WHEN soc_name = 'Investigations'
				THEN 'Measurement'
			ELSE 'Undefined'
			END AS domain_id
	FROM SOURCES.md_hierarchy h
	WHERE primary_soc_fg = 'Y'
	UNION
	--soc level: top of the hierarchy, SOC-name rules only
	SELECT soc_code AS concept_code,
		CASE
			--soc level
			WHEN soc_name ~ 'disorders|Infections|Neoplasms|Injury, poisoning and procedural complications|Pregnancy, puerperium and perinatal conditions'
				THEN 'Condition'
			WHEN soc_name ~ 'Surgical and medical procedures'
				THEN 'Procedure'
			WHEN soc_name IN (
					'Product issues',
					'Social circumstances'
					)
				THEN 'Observation'
			WHEN soc_name = 'Investigations'
				THEN 'Measurement'
			ELSE 'Undefined'
			END AS domain_id
	FROM SOURCES.md_hierarchy h
	WHERE primary_soc_fg = 'Y'
	)
UPDATE concept_stage cs
SET domain_id = t.domain_id
FROM t_domains t
-- source codes are numeric; concept_stage stores concept_code as text
WHERE cs.concept_code = t.concept_code::VARCHAR;
-- Fallback: terms whose primary_soc_fg = 'N' never appear in t_domains above,
-- so they are still NULL here; empirically these all belong to 'Condition'.
UPDATE concept_stage
SET domain_id = 'Condition'
WHERE domain_id IS NULL;
--5. Create internal hierarchical relationships
--   'Subsumes' links for each adjacent pair of hierarchy levels:
--   SOC > HLGT > HLT > PT > LLT.  The PT > LLT branch applies the same filter
--   as step 3 (current LLTs that are not the PT itself).
--   Date literals now use the same yyyymmdd format as step 3 for consistency;
--   previously written as TO_DATE('31.12.2099', 'dd.mm.yyyy') -- same value.
INSERT INTO concept_relationship_stage (concept_code_1,
	concept_code_2,
	vocabulary_id_1,
	vocabulary_id_2,
	relationship_id,
	valid_start_date,
	valid_end_date,
	invalid_reason)
SELECT soc_code AS concept_code_1,
	hlgt_code AS concept_code_2,
	'MedDRA' AS vocabulary_id_1,
	'MedDRA' AS vocabulary_id_2,
	'Subsumes' AS relationship_id,
	(SELECT latest_update
		FROM vocabulary
		WHERE vocabulary_id = 'MedDRA'),
	TO_DATE('20991231', 'yyyymmdd'),
	NULL
FROM SOURCES.soc_hlgt_comp
UNION ALL
SELECT hlgt_code AS concept_code_1,
	hlt_code AS concept_code_2,
	'MedDRA' AS vocabulary_id_1,
	'MedDRA' AS vocabulary_id_2,
	'Subsumes' AS relationship_id,
	(SELECT latest_update
		FROM vocabulary
		WHERE vocabulary_id = 'MedDRA'),
	TO_DATE('20991231', 'yyyymmdd'),
	NULL
FROM SOURCES.hlgt_hlt_comp
UNION ALL
SELECT hlt_code AS concept_code_1,
	pt_code AS concept_code_2,
	'MedDRA' AS vocabulary_id_1,
	'MedDRA' AS vocabulary_id_2,
	'Subsumes' AS relationship_id,
	(SELECT latest_update
		FROM vocabulary
		WHERE vocabulary_id = 'MedDRA'),
	TO_DATE('20991231', 'yyyymmdd'),
	NULL
FROM SOURCES.hlt_pref_comp
UNION ALL
SELECT pt_code AS concept_code_1,
	llt_code AS concept_code_2,
	'MedDRA' AS vocabulary_id_1,
	'MedDRA' AS vocabulary_id_2,
	'Subsumes' AS relationship_id,
	(SELECT latest_update
		FROM vocabulary
		WHERE vocabulary_id = 'MedDRA'),
	TO_DATE('20991231', 'yyyymmdd'),
	NULL
FROM SOURCES.low_level_term
WHERE llt_currency = 'Y' AND llt_code <> pt_code;
--6. Append result to concept_relationship_stage table
/*DO $_$
BEGIN
PERFORM VOCABULARY_PACK.ProcessManualRelationships();
END $_$;*/
--7. Working with replacement mappings
/*DO $_$
BEGIN
PERFORM VOCABULARY_PACK.CheckReplacementMappings();
END $_$;*/
--8. Add mapping from deprecated to fresh concepts
/*DO $_$
BEGIN
PERFORM VOCABULARY_PACK.AddFreshMAPSTO();
END $_$;*/
--9. Deprecate 'Maps to' mappings to deprecated and upgraded concepts
/*DO $_$
BEGIN
PERFORM VOCABULARY_PACK.DeprecateWrongMAPSTO();
END $_$;*/
--10. Delete ambiguous 'Maps to' mappings
/*DO $_$
BEGIN
PERFORM VOCABULARY_PACK.DeleteAmbiguousMAPSTO();
END $_$;*/
-- At the end, the three tables concept_stage, concept_relationship_stage and concept_synonym_stage should be ready to be fed into the generic_update.sql script
-- Automatically creates tables and views
-- Timescaledb extension
CREATE EXTENSION IF NOT EXISTS timescaledb;
-- Create OHLCVS table
-- Create Symbol exchange table
-- Create OHLCVS errors table
-- Create test table (resembles OHLCVS)
-- Raw OHLCV candles, one row per (exchange, pair, timestamp).
-- Converted into a TimescaleDB hypertable partitioned on "time" further below.
CREATE TABLE IF NOT EXISTS ohlcvs (
   time TIMESTAMPTZ NOT NULL,
   exchange VARCHAR(100) NOT NULL,
   base_id VARCHAR(20) NOT NULL,
   quote_id VARCHAR(20) NOT NULL,
   open NUMERIC NOT NULL,
   high NUMERIC NOT NULL,
   low NUMERIC NOT NULL,
   close NUMERIC NOT NULL,
   volume NUMERIC NOT NULL
);
-- Catalog of trading pairs listed per exchange; parent of ohlcvs rows (FK below).
CREATE TABLE IF NOT EXISTS symbol_exchange (
   exchange VARCHAR(100) NOT NULL,
   base_id VARCHAR(20) NOT NULL,
   quote_id VARCHAR(20) NOT NULL,
   symbol VARCHAR(40) NOT NULL,
   is_trading BOOLEAN NOT NULL
);
-- Failed-fetch log for the OHLCV collector.
-- symbol widened from VARCHAR(20) to VARCHAR(40) to match
-- symbol_exchange.symbol: a symbol accepted by the catalog must also fit here,
-- otherwise logging the error would itself fail on long symbols.
CREATE TABLE IF NOT EXISTS ohlcvs_errors (
   exchange VARCHAR(100) NOT NULL,
   symbol VARCHAR(40) NOT NULL,
   start_date TIMESTAMPTZ NOT NULL,
   end_date TIMESTAMPTZ NOT NULL,
   time_frame VARCHAR(10) NOT NULL,
   ohlcv_section VARCHAR(30),
   resp_status_code SMALLINT,
   exception_class TEXT NOT NULL,
   exception_message TEXT
);
-- Scratch table for experiments; mirrors the ohlcvs key structure
-- (b/q ~ base_id/quote_id, o/c ~ open/close).
CREATE TABLE IF NOT EXISTS test (
   id NUMERIC NOT NULL,
   b VARCHAR(20) NOT NULL,
   q VARCHAR(20) NOT NULL,
   o NUMERIC,
   c NUMERIC
);
-- Create primary key constraints.
-- Constraints are named explicitly (the names match what Postgres would
-- generate by default) so errors and future migrations are greppable.
ALTER TABLE ohlcvs
   ADD CONSTRAINT ohlcvs_pkey PRIMARY KEY (exchange, base_id, quote_id, "time");
ALTER TABLE symbol_exchange
   ADD CONSTRAINT symbol_exchange_pkey PRIMARY KEY (exchange, base_id, quote_id);
ALTER TABLE ohlcvs_errors
   ADD CONSTRAINT ohlcvs_errors_pkey PRIMARY KEY (exception_class, exchange, symbol, start_date, end_date, time_frame);
ALTER TABLE test
   ADD CONSTRAINT test_pkey PRIMARY KEY (id, b, q);
-- Create foreign key constraints
-- Every candle must reference a cataloged pair; deleting a pair purges its candles.
ALTER TABLE ohlcvs
   ADD CONSTRAINT exch_base_quote_fkey
   FOREIGN KEY (exchange, base_id, quote_id)
   REFERENCES symbol_exchange (exchange, base_id, quote_id)
   ON DELETE CASCADE;
-- Create indices
-- Time-leading indexes serve range scans; the symbol_exchange single-column
-- indexes serve lookups by each FK component.
CREATE INDEX IF NOT EXISTS ohlcvs_time_idx ON ohlcvs ("time" ASC);
CREATE INDEX IF NOT EXISTS ohlcvs_exch_time_idx ON ohlcvs (exchange, "time" ASC);
CREATE INDEX IF NOT EXISTS ohlcvs_base_quote_time_idx ON ohlcvs (base_id, quote_id, "time" ASC);
CREATE UNIQUE INDEX IF NOT EXISTS symexch_exch_sym_idx ON symbol_exchange (exchange, symbol);
CREATE INDEX IF NOT EXISTS symexch_exch_idx ON symbol_exchange (exchange);
CREATE INDEX IF NOT EXISTS symexch_base_idx ON symbol_exchange (base_id);
CREATE INDEX IF NOT EXISTS symexch_quote_idx ON symbol_exchange (quote_id);
-- Create timescaledb hypertable partitioned on "time".
-- if_not_exists keeps the script idempotent, matching the IF NOT EXISTS style
-- used for the plain tables and indexes above (a bare re-run would error out).
SELECT create_hypertable('ohlcvs', 'time', if_not_exists => TRUE);
-- Create materialized view for common base - quote among exchanges
-- The condition on COUNT() can change as more exchanges are added
-- This view is temporarily used to choose which symbols to fetch
-- ohlcvs data, because storage is limited
-- COUNT(*) > 2 means the pair is listed on at least 3 exchanges.
CREATE MATERIALIZED VIEW common_basequote_30 AS
   SELECT base_id, quote_id
   FROM symbol_exchange
   GROUP BY base_id, quote_id HAVING COUNT(*) > 2
   ORDER BY base_id ASC, quote_id ASC
   LIMIT 30;
-- Continuous aggregations
-- Create
-- 1-day OHLCV rollup as a TimescaleDB continuous aggregate.
-- first()/last() ordered by "time" yield the true open/close of each bucket.
-- WITH NO DATA: initial population is deferred to the refresh policy below.
CREATE MATERIALIZED VIEW ohlcvs_summary_daily
WITH (timescaledb.continuous) AS
   SELECT time_bucket('1 day', "time") AS "bucket",
      exchange,
      base_id,
      quote_id,
      first(open, "time") AS "open",
      max(high) AS "high",
      min(low) AS "low",
      last(close, "time") AS "close",
      sum(volume) AS "volume"
   FROM ohlcvs
   GROUP BY exchange, base_id, quote_id, "bucket"
WITH NO DATA;
-- Intraday/weekly OHLCV rollups (continuous aggregates); same shape as
-- ohlcvs_summary_daily, only the bucket width differs.
-- Consistency fix: keyword case ("CREATE MATERIALIZED VIEW", "AS") normalized
-- to match the daily aggregate above -- no behavioral change.
CREATE MATERIALIZED VIEW ohlcvs_summary_5min
WITH (timescaledb.continuous) AS
   SELECT time_bucket('5 minutes', "time") AS "bucket",
      exchange,
      base_id,
      quote_id,
      first(open, "time") AS "open",
      max(high) AS "high",
      min(low) AS "low",
      last(close, "time") AS "close",
      sum(volume) AS "volume"
   FROM ohlcvs
   GROUP BY exchange, base_id, quote_id, "bucket"
WITH NO DATA;
CREATE MATERIALIZED VIEW ohlcvs_summary_15min
WITH (timescaledb.continuous) AS
   SELECT time_bucket('15 minutes', "time") AS "bucket",
      exchange,
      base_id,
      quote_id,
      first(open, "time") AS "open",
      max(high) AS "high",
      min(low) AS "low",
      last(close, "time") AS "close",
      sum(volume) AS "volume"
   FROM ohlcvs
   GROUP BY exchange, base_id, quote_id, "bucket"
WITH NO DATA;
CREATE MATERIALIZED VIEW ohlcvs_summary_30min
WITH (timescaledb.continuous) AS
   SELECT time_bucket('30 minutes', "time") AS "bucket",
      exchange,
      base_id,
      quote_id,
      first(open, "time") AS "open",
      max(high) AS "high",
      min(low) AS "low",
      last(close, "time") AS "close",
      sum(volume) AS "volume"
   FROM ohlcvs
   GROUP BY exchange, base_id, quote_id, "bucket"
WITH NO DATA;
CREATE MATERIALIZED VIEW ohlcvs_summary_1hour
WITH (timescaledb.continuous) AS
   SELECT time_bucket('1 hour', "time") AS "bucket",
      exchange,
      base_id,
      quote_id,
      first(open, "time") AS "open",
      max(high) AS "high",
      min(low) AS "low",
      last(close, "time") AS "close",
      sum(volume) AS "volume"
   FROM ohlcvs
   GROUP BY exchange, base_id, quote_id, "bucket"
WITH NO DATA;
CREATE MATERIALIZED VIEW ohlcvs_summary_6hour
WITH (timescaledb.continuous) AS
   SELECT time_bucket('6 hours', "time") AS "bucket",
      exchange,
      base_id,
      quote_id,
      first(open, "time") AS "open",
      max(high) AS "high",
      min(low) AS "low",
      last(close, "time") AS "close",
      sum(volume) AS "volume"
   FROM ohlcvs
   GROUP BY exchange, base_id, quote_id, "bucket"
WITH NO DATA;
CREATE MATERIALIZED VIEW ohlcvs_summary_12hour
WITH (timescaledb.continuous) AS
   SELECT time_bucket('12 hours', "time") AS "bucket",
      exchange,
      base_id,
      quote_id,
      first(open, "time") AS "open",
      max(high) AS "high",
      min(low) AS "low",
      last(close, "time") AS "close",
      sum(volume) AS "volume"
   FROM ohlcvs
   GROUP BY exchange, base_id, quote_id, "bucket"
WITH NO DATA;
CREATE MATERIALIZED VIEW ohlcvs_summary_7day
WITH (timescaledb.continuous) AS
   SELECT time_bucket('7 days', "time") AS "bucket",
      exchange,
      base_id,
      quote_id,
      first(open, "time") AS "open",
      max(high) AS "high",
      min(low) AS "low",
      last(close, "time") AS "close",
      sum(volume) AS "volume"
   FROM ohlcvs
   GROUP BY exchange, base_id, quote_id, "bucket"
WITH NO DATA;
-- Geometric-mean daily return (%) per pair over the trailing ~week.
CREATE MATERIALIZED VIEW geo_daily_return AS
WITH
   -- Forward-fill: expand each daily close over the gap up to (but excluding)
   -- the next observed bucket, so every calendar day carries a close price.
   close_filled AS (
      SELECT
         generate_series(
            bucket,
            LEAD(bucket, 1) OVER (
               PARTITION BY exchange, base_id, quote_id ORDER BY bucket
            ) - interval '1 day',
            interval '1 day'
         )::date AS bucket,
         exchange,
         base_id,
         quote_id,
         close
      FROM ohlcvs_summary_daily
      WHERE bucket >= (CURRENT_DATE - interval '8 days')
         AND close <> 0
   ),
   -- Pair each day with the previous day's close (NULL on the first day).
   prev_close_view AS (
      SELECT
         *,
         LAG(close) OVER (
            PARTITION BY exchange, base_id, quote_id
            ORDER BY bucket ASC) AS prev_close
      FROM close_filled
   ),
   -- Log of the daily growth factor; summing logs multiplies the factors.
   daily_factor AS (
      SELECT
         bucket,
         exchange,
         base_id,
         quote_id,
         LN(close/prev_close) AS ln_daily_factor
      FROM prev_close_view
   )
-- Geometric mean of the daily factors, expressed as a percentage return.
SELECT
   exchange,
   base_id,
   quote_id,
   CAST(
      (POWER(EXP(SUM(ln_daily_factor)), (1.0/COUNT(*))) - 1) * 100
   AS NUMERIC(10, 4)
   ) AS daily_return_pct
FROM daily_factor
WHERE ln_daily_factor IS NOT NULL
GROUP BY exchange, base_id, quote_id
ORDER BY daily_return_pct DESC
WITH NO DATA;
-- Top 20 pairs by quote-denominated volume over the trailing week; all other
-- pairs are lumped into an 'Other' bucket.
-- Fix: internal CTE renamed from the misspelled "bqgrp_qoute_vol" to
-- "bqgrp_quoted_vol" (name is local to this definition -- no behavior change).
CREATE MATERIALIZED VIEW top_20_quoted_vol AS
WITH
-- volume in quote-currency terms per (exchange, base, quote)
ebq_quoted_vol AS (
   SELECT
      exchange, base_id, quote_id,
      close * volume AS quoted_vol
   FROM ohlcvs_summary_7day
   WHERE bucket >= (CURRENT_DATE - interval '8 days')
),
-- total quoted volume per pair, summed across exchanges
bq_quoted_vol AS (
   SELECT
      base_id, quote_id, SUM(quoted_vol) AS ttl_quoted_vol
   FROM ebq_quoted_vol
   GROUP BY base_id, quote_id
),
-- label the top 20 pairs "base-quote"; everything else becomes 'Other'
bqgrp_quoted_vol AS (
   SELECT
      (CASE
         WHEN ranking > 20 THEN 'Other'
         ELSE concat(base_id, '-', quote_id)
      END) AS bqgrp,
      ttl_quoted_vol
   FROM (
      SELECT
         *,
         ROW_NUMBER() OVER(ORDER BY ttl_quoted_vol DESC) AS ranking
      FROM bq_quoted_vol
   ) AS temp
   ORDER BY ttl_quoted_vol DESC
)
SELECT
   bqgrp,
   ROUND(SUM(ttl_quoted_vol), 4) AS total_volume
FROM bqgrp_quoted_vol
GROUP BY bqgrp
WITH NO DATA;
-- Week-over-week return (%) per pair.
-- DISTINCT ON keeps only the latest weekly bucket per pair (inner ORDER BY
-- ... bucket DESC picks it); the GROUP BY computes each bucket's open/close.
CREATE MATERIALIZED VIEW weekly_return AS
SELECT
   bucket AS time,
   exchange, base_id, quote_id,
   ROUND(((close_price - open_price) / open_price) * 100, 4) AS weekly_return_pct
FROM (
   SELECT DISTINCT ON (exchange, base_id, quote_id)
      time_bucket('1 week', time) as bucket,
      exchange, base_id, quote_id,
      first(open, time) as open_price,
      last(close, time) as close_price
   FROM ohlcvs
   WHERE time >= (CURRENT_DATE - interval '1 week')
   GROUP BY exchange, base_id, quote_id, bucket
   ORDER BY exchange, base_id, quote_id, bucket DESC
) temp
-- guard against missing data and division by a zero open price
WHERE close_price IS NOT NULL
   AND open_price IS NOT NULL and open_price <> 0
ORDER BY weekly_return_pct DESC
WITH NO DATA;
-- Indices on materialized views
-- UNIQUE indexes also enable REFRESH MATERIALIZED VIEW CONCURRENTLY.
CREATE UNIQUE INDEX geo_dr_idx ON geo_daily_return (exchange, base_id, quote_id);
CREATE UNIQUE INDEX top_20_qvlm_idx ON top_20_quoted_vol (bqgrp);
CREATE UNIQUE INDEX wr_idx ON weekly_return (exchange, base_id, quote_id, time);
-- Schedule continuous aggregations.
-- Pattern for every policy: refresh once per bucket interval, covering a
-- window from 3 buckets back (start_offset) to 1 bucket back (end_offset),
-- so the still-open bucket is never materialized.
SELECT add_continuous_aggregate_policy('ohlcvs_summary_daily',
   start_offset => INTERVAL '3 days',
   end_offset => INTERVAL '1 day',
   schedule_interval => INTERVAL '1 day');
SELECT add_continuous_aggregate_policy('ohlcvs_summary_5min',
   start_offset => INTERVAL '15 minutes',
   end_offset => INTERVAL '5 minutes',
   schedule_interval => INTERVAL '5 minutes');
SELECT add_continuous_aggregate_policy('ohlcvs_summary_15min',
   start_offset => INTERVAL '45 minutes',
   end_offset => INTERVAL '15 minutes',
   schedule_interval => INTERVAL '15 minutes');
SELECT add_continuous_aggregate_policy('ohlcvs_summary_30min',
   start_offset => INTERVAL '90 minutes',
   end_offset => INTERVAL '30 minutes',
   schedule_interval => INTERVAL '30 minutes');
SELECT add_continuous_aggregate_policy('ohlcvs_summary_1hour',
   start_offset => INTERVAL '3 hours',
   end_offset => INTERVAL '1 hour',
   schedule_interval => INTERVAL '1 hour');
SELECT add_continuous_aggregate_policy('ohlcvs_summary_6hour',
   start_offset => INTERVAL '18 hours',
   end_offset => INTERVAL '6 hours',
   schedule_interval => INTERVAL '6 hours');
SELECT add_continuous_aggregate_policy('ohlcvs_summary_12hour',
   start_offset => INTERVAL '36 hours',
   end_offset => INTERVAL '12 hours',
   schedule_interval => INTERVAL '12 hours');
SELECT add_continuous_aggregate_policy('ohlcvs_summary_7day',
   start_offset => INTERVAL '21 days',
   end_offset => INTERVAL '7 days',
   schedule_interval => INTERVAL '7 days');
-- // create baseline
--
-- Name: blueprint; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
--
-- Cloudbreak baseline schema (pg_dump style): Ambari blueprints, audit
-- events/usage records and clusters.  Column order and types are preserved
-- exactly -- this is the deployed baseline other migrations build on.
CREATE TABLE blueprint (
    id bigint NOT NULL,
    account character varying(255),
    blueprintname character varying(255),
    blueprinttext text,
    description text,
    hostgroupcount integer NOT NULL,
    name character varying(255) NOT NULL,
    owner character varying(255),
    publicinaccount boolean NOT NULL
);
--
-- Name: blueprint_table; Type: SEQUENCE; Schema: public; Owner: postgres
--
-- Hibernate-managed id sequence for blueprint.
CREATE SEQUENCE blueprint_table
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: cloudbreakevent; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
--
-- Denormalized audit trail of stack/cluster lifecycle events.
CREATE TABLE cloudbreakevent (
    id bigint NOT NULL,
    account character varying(255),
    blueprintid bigint NOT NULL,
    blueprintname character varying(255),
    cloud character varying(255),
    eventmessage character varying(255),
    eventtimestamp timestamp without time zone,
    eventtype character varying(255),
    instancegroup character varying(255),
    nodecount integer,
    owner character varying(255),
    region character varying(255),
    stackid bigint,
    stackname character varying(255),
    stackstatus character varying(255)
);
--
-- Name: cloudbreakusage; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
--
-- Daily per-stack usage/billing roll-up.
CREATE TABLE cloudbreakusage (
    id bigint NOT NULL,
    account character varying(255),
    costs double precision,
    day timestamp without time zone,
    instancegroup character varying(255),
    instancehours bigint,
    instancetype character varying(255),
    owner character varying(255),
    provider character varying(255),
    region character varying(255),
    stackid bigint,
    stackname character varying(255)
);
--
-- Name: cluster; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
--
-- An Ambari cluster provisioned from a blueprint (see blueprint_id FK style
-- naming); timestamps are epoch values stored as bigint.
CREATE TABLE cluster (
    id bigint NOT NULL,
    account character varying(255),
    creationfinished bigint,
    creationstarted bigint,
    description text,
    emailneeded boolean,
    kerberosadmin character varying(255),
    kerberosmasterkey character varying(255),
    kerberospassword character varying(255),
    name character varying(255) NOT NULL,
    owner character varying(255),
    secure boolean,
    status character varying(255),
    statusreason text,
    upsince bigint,
    blueprint_id bigint
);
--
-- Name: credential; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
--
-- Single-table-inheritance entity: dtype discriminates the cloud provider
-- subtype, so most provider-specific columns are nullable.
CREATE TABLE credential (
    dtype character varying(31) NOT NULL,
    id bigint NOT NULL,
    account character varying(255),
    description text,
    name character varying(255) NOT NULL,
    owner character varying(255),
    publicinaccount boolean NOT NULL,
    publickey text,
    endpoint character varying(255),
    password character varying(255),
    tenantname character varying(255),
    username character varying(255),
    projectid character varying(255),
    serviceaccountid character varying(255),
    serviceaccountprivatekey text,
    keypairname character varying(255),
    rolearn character varying(255),
    cerfile text,
    jks character varying(255),
    jksfile text,
    postfix character varying(255),
    sshcerfile text,
    subscriptionid character varying(255),
    temporaryawscredentials_accesskeyid character varying(255)
);
--
-- Name: credential_table; Type: SEQUENCE; Schema: public; Owner: postgres
--
CREATE SEQUENCE credential_table
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: failurepolicy; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
--
CREATE TABLE failurepolicy (
    id bigint NOT NULL,
    adjustmenttype character varying(255),
    threshold bigint
);
--
-- Name: hibernate_sequence; Type: SEQUENCE; Schema: public; Owner: postgres
--
-- Shared fallback id sequence used by Hibernate for entities without a
-- dedicated sequence.
CREATE SEQUENCE hibernate_sequence
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: hostgroup; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
--
CREATE TABLE hostgroup (
    id bigint NOT NULL,
    name character varying(255),
    cluster_id bigint,
    instancegroup_id bigint
);
--
-- Name: hostgroup_recipe; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
--
-- Junction table for the many-to-many hostgroup <-> recipe association.
CREATE TABLE hostgroup_recipe (
    hostgroup_id bigint NOT NULL,
    recipes_id bigint NOT NULL
);
--
-- Name: hostmetadata; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
--
CREATE TABLE hostmetadata (
    id bigint NOT NULL,
    hostname character varying(255),
    hostgroup_id bigint
);
--
-- Name: instancegroup; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
--
CREATE TABLE instancegroup (
    id bigint NOT NULL,
    groupname character varying(255),
    instancegrouptype character varying(255),
    nodecount integer,
    stack_id bigint,
    template_id bigint
);
--
-- Name: instancemetadata; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
--
-- Per-VM runtime metadata; start/termination dates are epoch bigints.
CREATE TABLE instancemetadata (
    id bigint NOT NULL,
    ambariserver boolean,
    consulserver boolean,
    containercount integer,
    dockersubnet character varying(255),
    instanceid character varying(255),
    instancestatus character varying(255),
    longname character varying(255),
    privateip character varying(255),
    publicip character varying(255),
    startdate bigint,
    terminationdate bigint,
    volumecount integer,
    instancegroup_id bigint
);
--
-- Name: recipe; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
--
CREATE TABLE recipe (
    id bigint NOT NULL,
    account character varying(255),
    description character varying(255),
    name character varying(255),
    owner character varying(255),
    publicinaccount boolean NOT NULL,
    timeout integer
);
--
-- Name: recipe_keyvalues; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
--
-- Key/value properties attached to a recipe (element-collection table).
CREATE TABLE recipe_keyvalues (
    recipe_id bigint NOT NULL,
    value text,
    key character varying(255) NOT NULL
);
--
-- Name: recipe_plugins; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
--
CREATE TABLE recipe_plugins (
    recipe_id bigint NOT NULL,
    execution_type character varying(255),
    plugin character varying(255) NOT NULL
);
--
-- Name: resource; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
--
-- Cloud resources created for a stack (resource_stack references stack.id).
CREATE TABLE resource (
    id bigint NOT NULL,
    instancegroup character varying(255),
    resourcename character varying(255),
    resourcetype character varying(255),
    resource_stack bigint
);
--
-- Name: sequence_table; Type: SEQUENCE; Schema: public; Owner: postgres
--
CREATE SEQUENCE sequence_table
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: stack; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
--
-- The provisioned infrastructure unit; owns a cluster, a credential and an
-- optional failure policy via the *_id FK columns.
CREATE TABLE stack (
    id bigint NOT NULL,
    account character varying(255),
    ambariip character varying(255),
    consulservers integer NOT NULL,
    description text,
    hash character varying(255),
    image character varying(255),
    metadataready boolean NOT NULL,
    name character varying(255) NOT NULL,
    onfailureactionaction character varying(255),
    owner character varying(255),
    password character varying(255),
    publicinaccount boolean NOT NULL,
    region character varying(255),
    status character varying(255),
    statusreason text,
    username character varying(255),
    version bigint,
    cluster_id bigint,
    credential_id bigint,
    failurepolicy_id bigint
);
--
-- Name: stack_parameters; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
--
-- Key/value parameters attached to a stack (element-collection table).
CREATE TABLE stack_parameters (
    stack_id bigint NOT NULL,
    value text,
    key character varying(255) NOT NULL
);
--
-- Name: stack_table; Type: SEQUENCE; Schema: public; Owner: postgres
--
CREATE SEQUENCE stack_table
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: subnet; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
--
CREATE TABLE subnet (
    id bigint NOT NULL,
    cidr character varying(255),
    modifiable boolean NOT NULL,
    stack_id bigint
);
--
-- Name: subscription; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
--
CREATE TABLE subscription (
    id bigint NOT NULL,
    clientid character varying(255),
    endpoint character varying(255)
);
--
-- Name: template; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
--
-- Single-table-inheritance entity like credential: dtype discriminates the
-- cloud provider subtype, hence the nullable provider-specific columns.
CREATE TABLE template (
    dtype character varying(31) NOT NULL,
    id bigint NOT NULL,
    account character varying(255),
    deleted boolean NOT NULL,
    description text,
    name character varying(255) NOT NULL,
    owner character varying(255),
    publicinaccount boolean NOT NULL,
    volumecount integer,
    volumesize integer,
    gccinstancetype character varying(255),
    gccrawdisktype character varying(255),
    instancetype character varying(255),
    publicnetid character varying(255),
    encrypted character varying(255),
    spotprice double precision,
    sshlocation character varying(255),
    volumetype character varying(255),
    vmtype character varying(255)
);
--
-- Name: temporaryawscredentials; Type: TABLE; Schema: public; Owner: postgres; Tablespace:
--
-- Short-lived AWS STS credentials; keyed by access key id, validuntil is an
-- epoch bigint.
CREATE TABLE temporaryawscredentials (
    accesskeyid character varying(255) NOT NULL,
    secretaccesskey character varying(255),
    sessiontoken text,
    validuntil bigint NOT NULL
);
--
-- Primary keys for every entity and element-collection table; collection
-- tables use composite keys of (owner_id, element key).
--
-- Name: blueprint_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
--
ALTER TABLE ONLY blueprint
    ADD CONSTRAINT blueprint_pkey PRIMARY KEY (id);
--
-- Name: cloudbreakevent_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
--
ALTER TABLE ONLY cloudbreakevent
    ADD CONSTRAINT cloudbreakevent_pkey PRIMARY KEY (id);
--
-- Name: cloudbreakusage_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
--
ALTER TABLE ONLY cloudbreakusage
    ADD CONSTRAINT cloudbreakusage_pkey PRIMARY KEY (id);
--
-- Name: cluster_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
--
ALTER TABLE ONLY cluster
    ADD CONSTRAINT cluster_pkey PRIMARY KEY (id);
--
-- Name: credential_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
--
ALTER TABLE ONLY credential
    ADD CONSTRAINT credential_pkey PRIMARY KEY (id);
--
-- Name: failurepolicy_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
--
ALTER TABLE ONLY failurepolicy
    ADD CONSTRAINT failurepolicy_pkey PRIMARY KEY (id);
--
-- Name: hostgroup_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
--
ALTER TABLE ONLY hostgroup
    ADD CONSTRAINT hostgroup_pkey PRIMARY KEY (id);
--
-- Name: hostgroup_recipe_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
--
ALTER TABLE ONLY hostgroup_recipe
    ADD CONSTRAINT hostgroup_recipe_pkey PRIMARY KEY (hostgroup_id, recipes_id);
--
-- Name: hostmetadata_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
--
ALTER TABLE ONLY hostmetadata
    ADD CONSTRAINT hostmetadata_pkey PRIMARY KEY (id);
--
-- Name: instancegroup_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
--
ALTER TABLE ONLY instancegroup
    ADD CONSTRAINT instancegroup_pkey PRIMARY KEY (id);
--
-- Name: instancemetadata_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
--
ALTER TABLE ONLY instancemetadata
    ADD CONSTRAINT instancemetadata_pkey PRIMARY KEY (id);
--
-- Name: recipe_keyvalues_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
--
ALTER TABLE ONLY recipe_keyvalues
    ADD CONSTRAINT recipe_keyvalues_pkey PRIMARY KEY (recipe_id, key);
--
-- Name: recipe_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
--
ALTER TABLE ONLY recipe
    ADD CONSTRAINT recipe_pkey PRIMARY KEY (id);
--
-- Name: recipe_plugins_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
--
ALTER TABLE ONLY recipe_plugins
    ADD CONSTRAINT recipe_plugins_pkey PRIMARY KEY (recipe_id, plugin);
--
-- Name: resource_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
--
ALTER TABLE ONLY resource
    ADD CONSTRAINT resource_pkey PRIMARY KEY (id);
--
-- Name: stack_parameters_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
--
ALTER TABLE ONLY stack_parameters
    ADD CONSTRAINT stack_parameters_pkey PRIMARY KEY (stack_id, key);
--
-- Name: stack_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
--
ALTER TABLE ONLY stack
    ADD CONSTRAINT stack_pkey PRIMARY KEY (id);
--
-- Name: subnet_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
--
ALTER TABLE ONLY subnet
    ADD CONSTRAINT subnet_pkey PRIMARY KEY (id);
--
-- Name: subscription_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
--
ALTER TABLE ONLY subscription
    ADD CONSTRAINT subscription_pkey PRIMARY KEY (id);
--
-- Name: template_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
--
ALTER TABLE ONLY template
    ADD CONSTRAINT template_pkey PRIMARY KEY (id);
--
-- Name: temporaryawscredentials_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
--
ALTER TABLE ONLY temporaryawscredentials
    ADD CONSTRAINT temporaryawscredentials_pkey PRIMARY KEY (accesskeyid);
--
-- Per-account natural-key uniqueness (account, name) for the user-named
-- entities; headers keep the original Hibernate-generated constraint names
-- for traceability while the constraints themselves use readable names.
--
-- Name: uk_4k41j29yr72m8g4jswkxae3wf; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
--
ALTER TABLE ONLY credential
    ADD CONSTRAINT uk_credential_account_name UNIQUE (account, name);
--
-- Name: uk_6linr69q0qs341t4dwksmj2k5; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
--
ALTER TABLE ONLY blueprint
    ADD CONSTRAINT uk_blueprint_account_name UNIQUE (account, name);
--
-- Name: uk_mo42wgq8xghku0pud0d7u9gxq; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
--
ALTER TABLE ONLY recipe
    ADD CONSTRAINT uk_recipe_account_name UNIQUE (account, name);
--
-- Name: uk_pcvp0gu76lf195n5e4bwn62hb; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
--
ALTER TABLE ONLY template
    ADD CONSTRAINT uk_template_account_name UNIQUE (account, name);
--
-- Name: uk_q7w7pdmjrdfy3qfst9l8nr2qb; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
--
ALTER TABLE ONLY stack
    ADD CONSTRAINT uk_stack_account_name UNIQUE (account, name);
--
-- Name: uk_qdso8lss2eqqwfaombyqmv02g; Type: CONSTRAINT; Schema: public; Owner: postgres; Tablespace:
--
ALTER TABLE ONLY cluster
    ADD CONSTRAINT uk_cluster_account_name UNIQUE (account, name);
--
-- Name: fk_51q9ax178o05oeghwi0hr8is5; Type: FK CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY credential
    ADD CONSTRAINT fk_credential_temporaryawscredentials_accesskeyid FOREIGN KEY (temporaryawscredentials_accesskeyid) REFERENCES temporaryawscredentials(accesskeyid);
--
-- Name: fk_5k57kt8hao4tipnk2umnxe0yn; Type: FK CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY hostmetadata
ADD CONSTRAINT fk_credential_hostgroup_id FOREIGN KEY (hostgroup_id) REFERENCES hostgroup(id);
--
-- Name: fk_7175vk9tgmhvq04moxgqv5e8o; Type: FK CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY stack
ADD CONSTRAINT fk_stack_credential_id FOREIGN KEY (credential_id) REFERENCES credential(id);
--
-- Name: fk_72cagekv8hcc33jeb47cyxyri; Type: FK CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY stack
ADD CONSTRAINT fk_stack_failurepolicy_id FOREIGN KEY (failurepolicy_id) REFERENCES failurepolicy(id);
--
-- Name: fk_8u5d88nlm8c16970kb9km4sft; Type: FK CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY recipe_keyvalues
ADD CONSTRAINT fk_recipe_keyvalues_recipe_id FOREIGN KEY (recipe_id) REFERENCES recipe(id);
--
-- Name: fk_92s82wcie3ogn4ohtjrbrmhaq; Type: FK CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY instancegroup
ADD CONSTRAINT fk_instancegroup_stack_id FOREIGN KEY (stack_id) REFERENCES stack(id);
--
-- Name: fk_al5gsr371caho71l56lx1c9ff; Type: FK CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY stack
ADD CONSTRAINT fk_stack_cluster_id FOREIGN KEY (cluster_id) REFERENCES cluster(id);
--
-- Name: fk_exmyy4cp2bumxgas1fu7ngsn0; Type: FK CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY resource
ADD CONSTRAINT fk_resource_resource_stack FOREIGN KEY (resource_stack) REFERENCES stack(id);
--
-- Name: fk_hqexgxamn3rb3rb1tqeieoabx; Type: FK CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY stack_parameters
ADD CONSTRAINT fk_stack_parameters_stack_id FOREIGN KEY (stack_id) REFERENCES stack(id);
--
-- Name: fk_i1l3439ihtev4gi9fqkj2f8ko; Type: FK CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY recipe_plugins
ADD CONSTRAINT fk_recipe_plugins_recipe_id FOREIGN KEY (recipe_id) REFERENCES recipe(id);
--
-- Name: fk_illyjalblhf0o1eilo7r65rdd; Type: FK CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY cluster
ADD CONSTRAINT fk_cluster_blueprint_id FOREIGN KEY (blueprint_id) REFERENCES blueprint(id);
--
-- Name: fk_k6ykksyri55tik7p4njurovk2; Type: FK CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY hostgroup_recipe
ADD CONSTRAINT fk_hostgroup_recipe_recipes_id FOREIGN KEY (recipes_id) REFERENCES recipe(id);
--
-- Name: fk_m9cnw9ixk8y44uvdaq41wxu91; Type: FK CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY instancemetadata
ADD CONSTRAINT fk_instancemetadata_instancegroup_id FOREIGN KEY (instancegroup_id) REFERENCES instancegroup(id);
--
-- Name: fk_ninuqigdnafac9fuwm5ia0np3; Type: FK CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY hostgroup_recipe
ADD CONSTRAINT fk_hostgroup_recipe_hostgroup_id FOREIGN KEY (hostgroup_id) REFERENCES hostgroup(id);
--
-- Name: fk_ofdfpdym6h4ri22dx5d41txe; Type: FK CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY subnet
ADD CONSTRAINT fk_subnet_stack_id FOREIGN KEY (stack_id) REFERENCES stack(id);
--
-- Name: fk_r1jejvs5t9rcnr5grr0popkp7; Type: FK CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY instancegroup
ADD CONSTRAINT fk_instancegroup_template_id FOREIGN KEY (template_id) REFERENCES template(id);
--
-- Name: fk_skihiei13mu259om0q2ic83y1; Type: FK CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY hostgroup
ADD CONSTRAINT fk_hostgroup_cluster_id FOREIGN KEY (cluster_id) REFERENCES cluster(id);
--
-- Name: fk_sridmlmxkiqigk3p62kh52sdx; Type: FK CONSTRAINT; Schema: public; Owner: postgres
--
ALTER TABLE ONLY hostgroup
ADD CONSTRAINT fk_hostgroup_instancegroup_id FOREIGN KEY (instancegroup_id) REFERENCES instancegroup(id);
-- //@UNDO | the_stack |
create or replace package body jsignature_pkg as
/**********************************************************************************
*
* Description: Package containing code related to jSignature data extraction.
*              Decodes jSignature "base30" compressed stroke strings into
*              either a JSON representation or an SVG graphic.
*
* Globals (declared in the package spec): gc_current_debug_level, gc_bitness,
* gc_charmap, gc_charmap_reverse, gc_minus, gc_plus, gc_chunk_separator,
* gc_debug_basic, gc_debug_detailed.
*
* Modification History:
*
* Date Who Description
* ============ ================ ===========
* 24 May 2021 Ben Wetherall Initial version
*
**********************************************************************************/
/*******************************************************************************
* Name: debug
* Purpose: Output debug information via dbms_output when the statement's
*          debug level is at or below the package's current debug level.
*******************************************************************************/
procedure debug (
    p_statement_debug_level number,
    p_debug_string varchar2
) is
begin
    if gc_current_debug_level >= p_statement_debug_level then
        dbms_output.put_line(p_debug_string);
    end if;
end debug;
/*******************************************************************************
* Name: string_to_array
* Purpose: Take string - return each character as array elements.
*          Returns an empty (initialised) collection for a NULL input instead
*          of failing on extend(NULL).
*******************************************************************************/
function string_to_array (
    p_string varchar2
) return apex_t_varchar2 is
    l_result_array apex_t_varchar2 := apex_t_varchar2();
begin
    if p_string is null then
        return l_result_array;
    end if;
    l_result_array.extend(length(p_string));
    for l_index in 1 .. l_result_array.count loop
        l_result_array(l_index) := substr(p_string, l_index, 1);
    end loop;
    return l_result_array;
end string_to_array;
/*******************************************************************************
* Name: intval
* Purpose: Convert number from other base (up to base 30) to integer (base 10).
*          Digits are 0-9 then A-T; input is upper-cased before lookup.
*          Returns NULL when either argument is NULL.
*******************************************************************************/
function intval (
    p_value varchar2,
    p_from_base number
) return number is
    c_hex varchar2(30) default '0123456789ABCDEFGHIJKLMNOPQRST';
    l_result number := 0;
begin
    if ( p_value is null or p_from_base is null ) then
        return null;
    end if;
    -- Horner's method: accumulate digit by digit.
    for i in 1 .. length(p_value) loop
        l_result := l_result * p_from_base + instr(c_hex, upper(substr(p_value,i,1)))-1;
    end loop;
    return l_result;
end intval;
/*******************************************************************************
* Name: initialise_globals
* Purpose: Set up character maps. The 60-character alphabet is split in half:
*          the first 30 chars are "digit" characters, the last 30 are their
*          "continuation" markers. gc_charmap maps digit -> marker and
*          gc_charmap_reverse maps marker -> digit.
*******************************************************************************/
procedure initialise_globals is
    l_allchars apex_t_varchar2;
    l_index number;
    l_display_index varchar2(10);
begin
    l_allchars := string_to_array('0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWX');
    gc_bitness := (l_allchars.count) / 2;
    -- Loop over allchar array, pairing position i with position i + gc_bitness
    l_index := gc_bitness;
    loop
        gc_charmap(l_allchars(l_index)) := l_allchars(l_index + gc_bitness);
        gc_charmap_reverse(l_allchars(l_index + gc_bitness)) := l_allchars(l_index);
        l_index := l_index - 1;
        exit when l_index = 0;
    end loop;
    -- Display array content (if debug turned on)
    l_display_index := gc_charmap.first;
    while l_display_index is not null loop
        debug(gc_debug_detailed, 'Charmap ' || l_display_index || ' : ' || gc_charmap(l_display_index));
        l_display_index := gc_charmap.next(l_display_index);
    end loop;
    l_display_index := gc_charmap_reverse.first;
    while l_display_index is not null loop
        debug(gc_debug_detailed, 'Reverse ' || l_display_index || ' : ' || gc_charmap_reverse(l_display_index));
        l_display_index := gc_charmap_reverse.next(l_display_index);
    end loop;
end initialise_globals;
/*******************************************************************************
* Name: uncompress_stroke_leg
* Purpose: Uncompress part of a stroke. The encoding is delta-based: each
*          number starts with a sign ('-'/'+') or a first-half-alphabet digit,
*          and continuation digits come from the second half of the alphabet.
*          Each decoded value is added to the previous one (running position).
*******************************************************************************/
function uncompress_stroke_leg (
    p_stroke_data_string varchar2
) return apex_t_number is
    l_answer_array apex_t_number := apex_t_number();
    l_char_array apex_t_varchar2;
    l_length number;
    l_current_char varchar2(1);
    l_polarity number := 1;
    l_partial_array apex_t_varchar2 := apex_t_varchar2();
    l_preprewhole number := 0;
    l_prewhole number := 0;
begin
    -- Split string into array
    l_char_array := jsignature_pkg.string_to_array(p_stroke_data_string);
    l_length := l_char_array.count;
    -- Loop over each character
    for l_index in 1 .. l_length loop
        debug(gc_debug_detailed, 'partialarray ' || apex_string.join(l_partial_array,','));
        debug(gc_debug_detailed, 'answerarray ' || apex_string.join(l_answer_array,','));
        l_current_char := l_char_array(l_index);
        if gc_charmap.exists(l_current_char) or l_current_char = gc_minus or l_current_char = gc_plus then
            -- A new number starts here: flush the digits gathered so far.
            if l_partial_array.count != 0 then
                l_prewhole := jsignature_pkg.intval(apex_string.join(l_partial_array,''), gc_bitness) * l_polarity + l_preprewhole;
                l_answer_array.extend;
                l_answer_array(l_answer_array.last) := l_prewhole;
                l_preprewhole := l_prewhole;
                l_partial_array.delete;
            end if;
            if l_current_char = gc_minus then
                l_polarity := -1;
                l_partial_array := apex_t_varchar2();
            elsif l_current_char = gc_plus then
                l_polarity := 1;
                l_partial_array := apex_t_varchar2();
            else
                l_partial_array.extend;
                l_partial_array(l_partial_array.last) := l_current_char;
            end if;
        else
            -- more parts for the new number (continuation char -> digit)
            l_partial_array.extend;
            l_partial_array(l_partial_array.last) := gc_charmap_reverse(l_current_char);
        end if;
    end loop;
    -- we always will have something stuck in partial
    l_answer_array.extend;
    l_answer_array(l_answer_array.last) := jsignature_pkg.intval(apex_string.join(l_partial_array,''), gc_bitness) * l_polarity + l_preprewhole;
    debug(gc_debug_detailed, 'answerarrayfinal ' || apex_string.join(l_answer_array,','));
    return l_answer_array;
end uncompress_stroke_leg;
/*******************************************************************************
* Name: base30_to_json
* Purpose: Convert jSignature base30 encoded string to JSON (with stroke details)
* Returns: CLOB holding a JSON array of {"x":[...],"y":[...]} stroke objects.
*******************************************************************************/
function base30_to_json (
    p_base30_string varchar2
) return clob is
    l_return_clob clob;
    l_chunk_array apex_t_varchar2;
    l_x_stroke_array apex_t_number;
    l_y_stroke_array apex_t_number;
    l_index number := 0;
begin
    initialise_globals;
    -- Split input into chunks: each stroke contributes an x chunk then a y chunk
    l_chunk_array := apex_string.split(p_base30_string, gc_chunk_separator);
    debug(gc_debug_detailed, 'Found Chunks:' || l_chunk_array.count);
    -- Initialise json and start array
    apex_json.initialize_clob_output;
    apex_json.open_array;
    -- Loop over chunk pairs
    loop
        exit when l_index >= l_chunk_array.count;
        -- Uncompress x and y strokes
        l_x_stroke_array := jsignature_pkg.uncompress_stroke_leg(l_chunk_array(l_index + 1));
        l_y_stroke_array := jsignature_pkg.uncompress_stroke_leg(l_chunk_array(l_index + 2));
        -- Debug AFTER the arrays are populated. Previously this ran before the
        -- first assignment, passing atomically-null collections to
        -- apex_string.join (arguments are evaluated even when debug is off).
        debug(gc_debug_basic, 'x:' || apex_string.join(l_x_stroke_array, ',') || ' y:' || apex_string.join(l_y_stroke_array, ','));
        apex_json.open_object;
        apex_json.write('x', l_x_stroke_array);
        apex_json.write('y', l_y_stroke_array);
        apex_json.close_object;
        l_index := l_index + 2;
    end loop;
    -- Finalise the JSON
    apex_json.close_array;
    l_return_clob := apex_json.get_clob_output;
    apex_json.free_output;
    return l_return_clob;
end base30_to_json;
/*******************************************************************************
* Name: generate_stroke_path
* Purpose: Generate SVG <path> element for given X/Y arrays, translated so the
*          drawing origin is (p_minx, p_miny). A single point is rendered as a
*          tiny 'l 1 -1' stroke so it appears as a dot.
*******************************************************************************/
function generate_stroke_path (
    p_x_stroke_array apex_t_number,
    p_y_stroke_array apex_t_number,
    p_minx number,
    p_miny number
) return varchar2 is
    l_return_string varchar2(30000);
    l_path_string varchar2(20000);
    l_lastx number;
    l_lasty number;
begin
    l_lastx := p_x_stroke_array(1);
    l_lasty := p_y_stroke_array(1);
    -- Move to the first point, then use relative line-to ('l') deltas.
    l_path_string := 'M ' || round(l_lastx - p_minx) || ' ' || round(l_lasty - p_miny) || ' l';
    -- Meaning this was just a DOT, not a stroke
    if p_x_stroke_array.count = 1 then
        l_path_string := l_path_string || ' 1 -1';
    else
        -- Process stroke (first iteration intentionally emits a 0 0 delta)
        for l_index in 1 .. p_x_stroke_array.count loop
            l_path_string := l_path_string || ' ' || (p_x_stroke_array(l_index) - l_lastx);
            l_path_string := l_path_string || ' ' || (p_y_stroke_array(l_index) - l_lasty);
            l_lastx := p_x_stroke_array(l_index);
            l_lasty := p_y_stroke_array(l_index);
        end loop;
    end if;
    -- Build path element and return it
    l_return_string := '<path fill="none" stroke="#000000" stroke-width="2"' ||
        ' stroke-linecap="round" stroke-linejoin="round"' ||
        ' d="' || l_path_string || '"/>';
    return l_return_string;
end generate_stroke_path;
/*******************************************************************************
* Name: calculate_dimensions
* Purpose: Work out dimensions based on the stroke array.
*          Scans every stroke for min/max X/Y, pads by 1, and returns the
*          overall width/height plus the (clamped-at-zero) origin offsets.
*******************************************************************************/
procedure calculate_dimensions (
    p_json_object apex_json.t_values,
    p_sizex out number,
    p_sizey out number,
    p_minx out number,
    p_miny out number
) is
    l_array_size number;
    l_x_stroke_array apex_t_number;
    l_y_stroke_array apex_t_number;
    l_current_minx number;
    l_current_maxx number;
    l_current_miny number;
    l_current_maxy number;
    l_overall_minx number := 0;
    l_overall_maxx number := 0;
    l_overall_miny number := 0;
    l_overall_maxy number := 0;
    l_padding number := 1;
begin
    l_array_size := apex_json.get_count(p_path => '.', p_values => p_json_object);
    -- Loop over stroke array
    for l_index in 1 .. l_array_size loop
        l_x_stroke_array := apex_json.get_t_number (
            p_values => p_json_object,
            p_path => '[%d].x',
            p0 => l_index
        );
        l_y_stroke_array := apex_json.get_t_number (
            p_values => p_json_object,
            p_path => '[%d].y',
            p0 => l_index
        );
        select min(column_value), max(column_value)
        into l_current_minx, l_current_maxx
        from table(l_x_stroke_array);
        select min(column_value), max(column_value)
        into l_current_miny, l_current_maxy
        from table(l_y_stroke_array);
        if l_current_minx < l_overall_minx then
            l_overall_minx := l_current_minx;
        end if;
        if l_current_miny < l_overall_miny then
            l_overall_miny := l_current_miny;
        end if;
        if l_current_maxx > l_overall_maxx then
            l_overall_maxx := l_current_maxx;
        end if;
        if l_current_maxy > l_overall_maxy then
            l_overall_maxy := l_current_maxy;
        end if;
    end loop;
    -- Pad one unit on every side so strokes do not touch the viewport edge.
    l_overall_minx := l_overall_minx - l_padding;
    l_overall_miny := l_overall_miny - l_padding;
    l_overall_maxx := l_overall_maxx + l_padding;
    l_overall_maxy := l_overall_maxy + l_padding;
    p_sizex := l_overall_maxx - l_overall_minx;
    p_sizey := l_overall_maxy - l_overall_miny;
    -- Fix if minimum X or Y is less than zero
    if l_overall_minx < 0 then
        l_overall_minx := 0;
    end if;
    if l_overall_miny < 0 then
        l_overall_miny := 0;
    end if;
    p_minx := l_overall_minx;
    p_miny := l_overall_miny;
end calculate_dimensions;
/*******************************************************************************
* Name: base30_to_svg
* Purpose: Convert jSignature base30 encoded string to SVG graphic format.
* Returns: CLOB containing a standalone SVG document, or NULL for NULL input.
*******************************************************************************/
function base30_to_svg (
    p_base30_string varchar2
) return clob is
    l_return_clob clob;
    l_result_json clob;
    l_json_object apex_json.t_values;
    l_svg_array apex_t_varchar2 := apex_t_varchar2();
    l_array_size number;
    l_x_stroke_array apex_t_number;
    l_y_stroke_array apex_t_number;
    l_sizex number := 0;
    l_sizey number := 0;
    l_minx number := 0;
    l_miny number := 0;
begin
    -- Note: in Oracle '' is NULL, so the second test is belt-and-braces.
    if p_base30_string is null or p_base30_string = '' then
        return null;
    end if;
    l_result_json := jsignature_pkg.base30_to_json(p_base30_string);
    -- Parse the JSON, determine how many array entries
    apex_json.parse(l_json_object, l_result_json);
    l_array_size := apex_json.get_count(p_path => '.', p_values => l_json_object);
    -- Start the svg
    l_svg_array.extend;
    l_svg_array(l_svg_array.last) := '<?xml version="1.0" encoding="UTF-8" standalone="no"?>';
    l_svg_array.extend;
    l_svg_array(l_svg_array.last) := '<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">';
    -- Determine dimensions for the svg
    jsignature_pkg.calculate_dimensions (
        p_json_object => l_json_object,
        p_sizex => l_sizex,
        p_sizey => l_sizey,
        p_minx => l_minx,
        p_miny => l_miny
    );
    -- Write out size information to SVG array
    l_svg_array.extend;
    l_svg_array(l_svg_array.last) := '<svg xmlns="http://www.w3.org/2000/svg" version="1.1" width="' || l_sizex || '" height="' || l_sizey || '">';
    -- Write out the strokes
    for l_index in 1 .. l_array_size loop
        l_x_stroke_array := apex_json.get_t_number (
            p_values => l_json_object,
            p_path => '[%d].x',
            p0 => l_index
        );
        l_y_stroke_array := apex_json.get_t_number (
            p_values => l_json_object,
            p_path => '[%d].y',
            p0 => l_index
        );
        -- Write out the SVG path element for this stroke
        l_svg_array.extend;
        l_svg_array(l_svg_array.last) := jsignature_pkg.generate_stroke_path (
            p_x_stroke_array => l_x_stroke_array,
            p_y_stroke_array => l_y_stroke_array,
            p_minx => l_minx,
            p_miny => l_miny
        );
    end loop;
    -- Finish off the SVG
    l_svg_array.extend;
    l_svg_array(l_svg_array.last) := '</svg>';
    -- Convert array back to concatenated lines of text
    l_return_clob := apex_string.join_clob(l_svg_array);
    return l_return_clob;
end base30_to_svg;
end jsignature_pkg;
/
--
-- Test for Row Level Security feature
--
-- initial setup: one user per persona plus two groups used in the policies below
CREATE USER regress_rls_alice NOLOGIN PASSWORD 'Ttest@123';
CREATE USER regress_rls_bob NOLOGIN PASSWORD 'Ttest@123';
CREATE USER regress_rls_david NOLOGIN PASSWORD 'Ttest@123';
CREATE USER regress_rls_peter NOLOGIN PASSWORD 'Ttest@123';
CREATE USER regress_rls_single_user NOLOGIN PASSWORD 'Ttest@123';
-- Fix: password must be a single-quoted string literal; double quotes denote an
-- identifier and are inconsistent with every other CREATE USER here.
CREATE USER regress_rls_admin SYSADMIN NOLOGIN PASSWORD 'Ttest@123';
CREATE ROLE regress_rls_group1 NOLOGIN PASSWORD 'Ttest@123';
CREATE ROLE regress_rls_group2 NOLOGIN PASSWORD 'Ttest@123';
GRANT ALL on pg_roles to public;
GRANT ALL on pg_user to public;
-- group1: alice, bob, peter / group2: david, peter, admin (peter is in both)
GRANT regress_rls_group1 TO regress_rls_alice, regress_rls_bob, regress_rls_peter;
GRANT regress_rls_group2 TO regress_rls_david, regress_rls_peter, regress_rls_admin;
CREATE SCHEMA regress_rls_schema;
GRANT CREATE ON SCHEMA regress_rls_schema to public;
GRANT USAGE ON SCHEMA regress_rls_schema to public;
ALTER DATABASE regression ENABLE PRIVATE OBJECT;
-- reconnect so the private-object setting takes effect for this session
\c
SET search_path = regress_rls_schema;
-- regress_rls_alice is the owner of all schema
SET ROLE regress_rls_alice PASSWORD 'Ttest@123';
-- setup of malicious function (NOT SHIPPABLE)
-- rls_fleak1 "leaks" every row it is handed via RAISE NOTICE; its tiny COST
-- tempts the planner to evaluate it before the RLS quals, probing for leakage.
CREATE OR REPLACE FUNCTION regress_rls_schema.rls_fleak1(text) RETURNS bool
COST 0.0000001 LANGUAGE plpgsql
AS 'BEGIN RAISE NOTICE ''f_leak => %'', $1; RETURN true; END';
GRANT EXECUTE ON FUNCTION regress_rls_schema.rls_fleak1(text) TO public;
-- setup of malicious immutable function (SHIPPABLE)
-- Same leak, but IMMUTABLE so it is eligible to be shipped/pushed down.
CREATE OR REPLACE FUNCTION regress_rls_schema.rls_fleak2(text) RETURNS bool
COST 0.0000001 LANGUAGE plpgsql IMMUTABLE
AS 'BEGIN RAISE NOTICE ''f_leak => %'', $1; RETURN true; END';
GRANT EXECUTE ON FUNCTION regress_rls_schema.rls_fleak2(text) TO public;
-- auto generate row level security policy
-- Creates p_num always-true policies named <t_name>_rls_1 .. <t_name>_rls_<p_num>
-- on table t_name; used below to probe the per-table policy-count limit.
CREATE OR REPLACE FUNCTION regress_rls_schema.rls_auto_create_policy(t_name text, p_num int)
RETURNS INTEGER AS $$
DECLARE
stmt text;
BEGIN
FOR idx IN 1 .. p_num LOOP
stmt := 'CREATE ROW LEVEL SECURITY POLICY ' || t_name || '_rls_' || idx || ' ON ' || t_name || ' USING(TRUE);';
EXECUTE stmt;
END LOOP;
RETURN 1;
END;
$$ LANGUAGE plpgsql;
REVOKE EXECUTE ON FUNCTION regress_rls_schema.rls_auto_create_policy(text, int) FROM public;
-- auto drop row level security policy
-- Drops the p_num policies <t_name>_rls_1 .. <t_name>_rls_<p_num> created by
-- rls_auto_create_policy.
CREATE OR REPLACE FUNCTION regress_rls_schema.rls_auto_drop_policy(t_name text, p_num int) RETURNS INTEGER AS $$
DECLARE
stmt text;
BEGIN
FOR idx IN 1 .. p_num LOOP
stmt := 'DROP ROW LEVEL SECURITY POLICY ' || t_name || '_rls_' || idx || ' ON ' || t_name;
EXECUTE stmt;
END LOOP;
RETURN 1;
END;
$$ LANGUAGE plpgsql;
REVOKE EXECUTE ON FUNCTION regress_rls_schema.rls_auto_drop_policy(text, int) FROM public;
-- BASIC Row-Level Security Scenario
-- account_*: maps each user name to a numeric clearance level (aid), used by
-- the p01 policies below. Row- and column-oriented copies of each table are
-- kept so every check runs against both storage engines.
CREATE TABLE regress_rls_schema.account_row(
aid int,
aname varchar(100)
) WITH (ORIENTATION=row);
GRANT SELECT ON regress_rls_schema.account_row TO public;
INSERT INTO regress_rls_schema.account_row VALUES
(1, 'regress_rls_alice'),
(2, 'regress_rls_bob'),
(3, 'regress_rls_david'),
(4, 'regress_rls_peter'),
(5, 'regress_rls_admin'),
(6, 'regress_rls_single_user');
ANALYZE regress_rls_schema.account_row;
CREATE TABLE regress_rls_schema.account_col(
aid int,
aname varchar(100)
) WITH (ORIENTATION=column);
GRANT SELECT ON regress_rls_schema.account_col TO public;
INSERT INTO regress_rls_schema.account_col SELECT * FROM regress_rls_schema.account_row;
ANALYZE regress_rls_schema.account_col;
-- category_*: document categories; cid 44 ('manga') is specifically denied to
-- regress_rls_david by policy p03 below.
CREATE TABLE regress_rls_schema.category_row(
cid int primary key,
cname text
) WITH (ORIENTATION=row);
GRANT ALL ON regress_rls_schema.category_row TO public;
INSERT INTO regress_rls_schema.category_row VALUES
(11, 'novel'),
(22, 'science fiction'),
(33, 'technology'),
(44, 'manga'),
(55, 'biography');
ANALYZE regress_rls_schema.category_row;
CREATE TABLE regress_rls_schema.category_col(
cid int,
cname text
) WITH (ORIENTATION=column);
GRANT ALL ON regress_rls_schema.category_col TO public;
INSERT INTO regress_rls_schema.category_col SELECT * FROM regress_rls_schema.category_row;
ANALYZE regress_rls_schema.category_col;
-- document_*: the protected data; dlevel is the document's clearance level
-- compared against the reader's account aid by policy p01.
CREATE TABLE regress_rls_schema.document_row(
did int primary key,
cid int,
dlevel int not null,
dauthor name,
dtitle text
);
GRANT ALL ON regress_rls_schema.document_row TO public;
INSERT INTO regress_rls_schema.document_row VALUES
( 1, 11, 1, 'regress_rls_bob', 'my first novel'),
( 2, 11, 5, 'regress_rls_bob', 'my second novel'),
( 3, 22, 7, 'regress_rls_bob', 'my science fiction'),
( 4, 44, 9, 'regress_rls_bob', 'my first manga'),
( 5, 44, 3, 'regress_rls_bob', 'my second manga'),
( 6, 22, 2, 'regress_rls_peter', 'great science fiction'),
( 7, 33, 6, 'regress_rls_peter', 'great technology book'),
( 8, 44, 4, 'regress_rls_peter', 'great manga'),
( 9, 22, 5, 'regress_rls_david', 'awesome science fiction'),
(10, 33, 4, 'regress_rls_david', 'awesome technology book'),
(11, 55, 8, 'regress_rls_alice', 'great biography'),
(12, 33, 10, 'regress_rls_admin', 'physical technology'),
(13, 55, 5, 'regress_rls_single_user', 'Beethoven biography');
ANALYZE regress_rls_schema.document_row;
CREATE TABLE regress_rls_schema.document_col(
did int,
cid int,
dlevel int not null,
dauthor name,
dtitle text
);
GRANT ALL ON regress_rls_schema.document_col TO public;
INSERT INTO regress_rls_schema.document_col SELECT * FROM regress_rls_schema.document_row;
ANALYZE regress_rls_schema.document_col;
-- create partition table: verify RLS interacts correctly with range partitions
CREATE TABLE par_row_t1 (id int, a int, b text)partition by range (a)
(
partition par_row_t1_p0 values less than(10),
partition par_row_t1_p1 values less than(50),
partition par_row_t1_p2 values less than(100),
partition par_row_t1_p3 values less than (maxvalue)
);
CREATE TABLE par_col_t1(id int, a int, b text) with(orientation = column) /*distribute by hash (id)*/ PARTITION BY RANGE (a)
(
partition par_col_t1_p0 values less than(10),
partition par_col_t1_p1 values less than(50),
partition par_col_t1_p2 values less than(100),
partition par_col_t1_p3 values less than (maxvalue)
);
INSERT INTO par_row_t1 VALUES (generate_series(1, 150) % 24, generate_series(1, 150), 'huawei');
INSERT INTO par_col_t1 VALUES (generate_series(1, 150) % 24, generate_series(1, 150), 'huawei');
GRANT SELECT ON par_row_t1 TO PUBLIC;
GRANT SELECT ON par_col_t1 TO PUBLIC;
-- PERMISSIVE policy for everyone plus a RESTRICTIVE policy that further limits
-- group2 members: group2 readers see a <= 20 AND id < 30; others see a <= 20.
CREATE ROW LEVEL SECURITY POLICY par_row_t1_rls1 ON par_row_t1 AS PERMISSIVE TO public USING(a <= 20);
CREATE ROW LEVEL SECURITY POLICY par_row_t1_rls2 ON par_row_t1 AS RESTRICTIVE TO regress_rls_group2 USING(id < 30);
CREATE ROW LEVEL SECURITY POLICY par_col_t1_rls1 ON par_col_t1 AS PERMISSIVE TO public USING(a <= 20);
CREATE ROW LEVEL SECURITY POLICY par_col_t1_rls2 ON par_col_t1 AS RESTRICTIVE TO regress_rls_group2 USING(id < 30);
ALTER TABLE par_row_t1 ENABLE ROW LEVEL SECURITY;
ALTER TABLE par_col_t1 ENABLE ROW LEVEL SECURITY;
-- create replication table: group1 sees only its own row, group2 only id = 1
CREATE TABLE tt_rep(id int, name varchar(100)) /*DISTRIBUTE BY REPLICATION*/;
GRANT SELECT ON tt_rep TO PUBLIC;
INSERT INTO tt_rep VALUES (1, 'regress_rls_alice'), (2, 'regress_rls_david'), (3, 'regress_rls_peter'), (4, 'regress_rls_bob');
ALTER TABLE tt_rep ENABLE ROW LEVEL SECURITY;
CREATE ROW LEVEL SECURITY POLICY tt_rep_rls1 ON tt_rep AS PERMISSIVE FOR SELECT TO regress_rls_group1 USING(name = current_user);
CREATE ROW LEVEL SECURITY POLICY tt_rep_rls2 ON tt_rep AS PERMISSIVE FOR SELECT TO regress_rls_group2 USING(id = 1);
-- create private table, test database private object
CREATE TABLE alice_private(id int, name varchar(100));
CREATE TABLE alice_public_1(id int, name varchar(100));
GRANT SELECT ON alice_public_1 TO regress_rls_group1;
CREATE TABLE alice_public_2(id int, name varchar(100));
GRANT SELECT ON alice_public_2 TO regress_rls_group2;
-- create temp table: RLS on a temp table should create and drop cleanly
CREATE TEMP TABLE temp_tt(id int, name varchar(20));
CREATE ROW LEVEL SECURITY POLICY temp_tt_rls ON temp_tt USING(id < 100);
ALTER TABLE temp_tt ENABLE ROW LEVEL SECURITY;
DROP TABLE temp_tt;
-- create 100 row level security policies on account_row (the per-table maximum)
SELECT regress_rls_schema.rls_auto_create_policy('account_row', 100);
-- create 101st row level security policy on account_row, failed (limit exceeded)
CREATE ROW LEVEL SECURITY POLICY account_row_rls_101 ON regress_rls_schema.account_row USING(FALSE);
-- drop 100 row level security policies on account_row
SELECT regress_rls_schema.rls_auto_drop_policy('account_row', 100);
-- create row level security policy on account_row, succeed (limit freed up)
CREATE ROW LEVEL SECURITY POLICY account_row_rls_101 ON regress_rls_schema.account_row USING(FALSE);
-- drop row level security policy account_row_rls_101 for account_row
DROP ROW LEVEL SECURITY POLICY account_row_rls_101 ON regress_rls_schema.account_row;
SELECT count(*) FROM pg_catalog.pg_rlspolicies where tablename = 'account_row';
-- enable row level security for document_row, document_col
ALTER TABLE regress_rls_schema.document_row ENABLE ROW LEVEL SECURITY;
ALTER TABLE regress_rls_schema.document_col ENABLE ROW LEVEL SECURITY;
-- user's security level must be higher than or equal to document's
CREATE ROW LEVEL SECURITY POLICY p01 ON document_row AS PERMISSIVE
USING (dlevel <= (SELECT aid FROM account_row WHERE aname = current_user));
CREATE ROW LEVEL SECURITY POLICY p01 ON document_col AS PERMISSIVE
USING (dlevel <= (SELECT aid FROM account_col WHERE aname = current_user));
-- try to create a policy of wrong type (deliberate syntax error: WHATEVER)
CREATE ROW LEVEL SECURITY POLICY p02 ON document_row AS WHATEVER
USING (dlevel <= (SELECT aid FROM account_row WHERE aname = current_user));
-- regress_rls_david isn't allowed to anything at cid 50 or above
-- this is to make sure that we sort the policies by name first
CREATE ROW LEVEL SECURITY POLICY p02 ON document_row AS RESTRICTIVE TO regress_rls_david
USING (cid < 50);
CREATE ROW LEVEL SECURITY POLICY p02 ON document_col AS RESTRICTIVE TO regress_rls_david
USING (cid < 50);
-- and regress_rls_david isn't allowed to see manga documents (cid 44)
CREATE ROW LEVEL SECURITY POLICY p03 ON document_row AS RESTRICTIVE TO regress_rls_david
USING (cid <> 44);
CREATE ROW LEVEL SECURITY POLICY p03 ON document_col AS RESTRICTIVE TO regress_rls_david
USING (cid <> 44);
-- policy for update/delete: bob and david may only UPDATE odd-level rows and
-- DELETE even-level rows
CREATE ROW LEVEL SECURITY POLICY p04 ON document_row AS RESTRICTIVE FOR UPDATE TO regress_rls_bob, regress_rls_david USING ((dlevel % 2) = 1);
CREATE ROW LEVEL SECURITY POLICY p05 ON document_row AS RESTRICTIVE FOR DELETE TO regress_rls_bob, regress_rls_david USING ((dlevel % 2) = 0);
-- policy for regress_rls_bob: SELECT restricted to odd-level rows
CREATE ROW LEVEL SECURITY POLICY p06 ON document_row AS RESTRICTIVE FOR SELECT TO regress_rls_bob USING ((dlevel % 2) = 1);
\d
\d+ document_row
SELECT * FROM pg_rlspolicies WHERE schemaname = 'regress_rls_schema' AND tablename = 'document_row' ORDER BY policyname;
-- prepare statement: verify prepared plans are re-checked when ROLE changes
PREPARE one AS SELECT * FROM document_row ORDER BY 1;
PREPARE two AS SELECT * FROM document_col ORDER BY 1;
EXECUTE one;
EXECUTE one;
EXECUTE two;
EXECUTE two;
-- viewpoint from regress_rls_bob (aid 2: only dlevel <= 2 AND odd dlevel visible)
SET ROLE regress_rls_bob PASSWORD 'Ttest@123';
EXECUTE one;
EXECUTE two;
SELECT * FROM document_row WHERE rls_fleak1(dtitle) ORDER BY did;
SELECT * FROM document_col WHERE rls_fleak2(dauthor) ORDER BY did;
-- EXPLAIN (COSTS OFF, VERBOSE ON) SELECT * FROM document_row WHERE rls_fleak2(dauthor) ORDER BY did;
SELECT * FROM document_col INNER JOIN category_col ON document_col.cid=category_col.cid WHERE rls_fleak1(dtitle) ORDER BY did;
SELECT * FROM tt_rep;
SELECT * FROM document_row INNER JOIN category_row ON document_row.cid=category_row.cid WHERE rls_fleak2(dauthor) ORDER BY did;
-- EXPLAIN (COSTS OFF, VERBOSE ON) SELECT * FROM document_row INNER JOIN category_row ON document_row.cid=category_row.cid WHERE rls_fleak2(dauthor) ORDER BY did;
\d
\df
-- viewpoint from regress_rls_peter (aid 4; member of both groups)
SET ROLE regress_rls_peter PASSWORD 'Ttest@123';
EXECUTE one;
EXECUTE two;
SELECT * FROM document_row WHERE rls_fleak1(dtitle) ORDER BY did;
SELECT * FROM document_col WHERE rls_fleak2(dauthor) ORDER BY did;
EXPLAIN (COSTS OFF, VERBOSE ON) SELECT * FROM document_row WHERE rls_fleak2(dauthor) ORDER BY did;
SELECT * FROM document_col INNER JOIN category_col ON document_col.cid=category_col.cid WHERE rls_fleak1(dtitle) ORDER BY did;
SELECT * FROM tt_rep;
SELECT * FROM document_row INNER JOIN category_row ON document_row.cid=category_row.cid WHERE rls_fleak2(dauthor) ORDER BY did;
EXPLAIN (COSTS OFF, VERBOSE ON) SELECT * FROM document_row INNER JOIN category_row ON document_row.cid=category_row.cid WHERE rls_fleak2(dauthor) ORDER BY did;
-- viewpoint from regress_rls_david (aid 3; also restricted by p02/p03: cid < 50 and cid <> 44)
SET ROLE regress_rls_david PASSWORD 'Ttest@123';
EXECUTE one;
EXECUTE two;
SELECT * FROM document_row WHERE rls_fleak1(dtitle) ORDER BY did;
SELECT * FROM document_col WHERE rls_fleak2(dauthor) ORDER BY did;
EXPLAIN (COSTS OFF, VERBOSE ON) SELECT * FROM document_row ORDER BY did;
EXPLAIN (COSTS OFF, VERBOSE ON) SELECT * FROM document_row WHERE rls_fleak2(dauthor) ORDER BY did;
SELECT * FROM document_col INNER JOIN category_col ON document_col.cid=category_col.cid WHERE rls_fleak1(dtitle) ORDER BY did;
SELECT * FROM tt_rep;
-- COPY must also be filtered by RLS
COPY document_row TO STDOUT;
COPY document_col TO STDOUT;
SELECT * FROM document_row INNER JOIN category_row ON document_row.cid=category_row.cid WHERE rls_fleak2(dauthor) ORDER BY did;
EXPLAIN (COSTS OFF, VERBOSE ON) SELECT * FROM document_row INNER JOIN category_row ON document_row.cid=category_row.cid WHERE rls_fleak2(dauthor) ORDER BY did;
-- update and update returning (no-op arithmetic keeps data unchanged while
-- exercising the FOR UPDATE policy p04)
UPDATE document_row SET dlevel = dlevel + 1 - 1 WHERE did > 1;
UPDATE document_col SET dlevel = dlevel + 1 - 1 WHERE did > 1 RETURNING dauthor, did;
-- delete and delete returning (row is re-inserted first so each DELETE has a target)
INSERT INTO document_row VALUES (100, 49, 1, 'regress_rls_david', 'testing sorting of policies');
DELETE FROM document_row WHERE did = 100;
INSERT INTO document_row VALUES (100, 49, 1, 'regress_rls_david', 'testing sorting of policies');
DELETE FROM document_row WHERE did = 100 RETURNING dauthor, did;
-- only owner can change policies
ALTER POLICY p01 ON document_row USING (true); --fail
DROP POLICY p01 ON document_col; --fail
-- check data from partition table
SELECT * FROM par_row_t1 WHERE a > 7 ORDER BY 1, 2;
SELECT * FROM par_col_t1 WHERE a > 7 ORDER BY 1, 2;
-- test create table as: CTAS should only copy the rows david is allowed to see
CREATE TABLE document_row_david AS SELECT * FROM document_row;
SELECT COUNT(*) FROM document_row_david;
-- check table and functions
\d
\df
-- change to super user
RESET ROLE;
-- DROP USER failed, display dependency
DROP USER regress_rls_bob;
DROP OWNED BY regress_rls_bob;
select * from pg_shdepend where classid = 3254 and refclassid = 1260 and refobjid = (select oid from pg_authid where rolname = 'regress_rls_bob');
DROP USER regress_rls_bob;
ALTER POLICY p01 ON document_row USING (dauthor = current_user);
ALTER POLICY p01 ON document_row RENAME TO p12;
ALTER POLICY p12 ON document_row RENAME TO p13;
ALTER POLICY p13 ON document_row RENAME TO p01;
SELECT * FROM pg_rlspolicies ORDER BY tablename, policyname;
-- enable private object
ALTER DATABASE regression DISABLE PRIVATE OBJECT;
-- reconnect
\c
SET search_path = regress_rls_schema;
-- check audit logs
SELECT type, database, object_name, detail_info FROM pg_query_audit('2000-01-01 00:00:00', '2100-01-01 00:00:00')
WHERE detail_info LIKE '%private object%' OR detail_info LIKE '%PRIVATE OBJECT%' ORDER BY detail_info;
-- viewpoint from rls_regres_david again
SET ROLE regress_rls_david PASSWORD 'Ttest@123';
SELECT * FROM document_row ORDER BY did;
SELECT * FROM document_col ORDER BY did;
SELECT * FROM document_row WHERE rls_fleak1(dtitle) ORDER BY did;
SELECT * FROM document_row WHERE rls_fleak2(dtitle) ORDER BY did;
EXPLAIN (COSTS OFF, VERBOSE ON) SELECT * FROM document_row WHERE rls_fleak2(dtitle);
SELECT * FROM document_row INNER JOIN category_row ON document_row.cid=category_row.cid WHERE rls_fleak2(dtitle) ORDER by did;
-- test inlist
SET qrw_inlist2join_optmode=1;
CREATE TABLE inlist_t1(c1 int, c2 int, c3 int) /*DISTRIBUTE BY HASH(c1)*/;
INSERT INTO inlist_t1 SELECT v,v,v FROM generate_series(1,12) as v;
CREATE ROW LEVEL SECURITY POLICY inlist_t1_rls ON inlist_t1 USING(c3 IN (3,4,7));
ALTER TABLE inlist_t1 ENABLE ROW LEVEL SECURITY;
ALTER TABLE inlist_t1 FORCE ROW LEVEL SECURITY;
SELECT * FROM inlist_t1 ORDER BY c1;
RESET qrw_inlist2join_optmode;
-- check data from partition table
SELECT * FROM par_row_t1 WHERE a > 7 ORDER BY 1, 2;
SELECT * FROM par_col_t1 WHERE a > 7 ORDER BY 1, 2;
SELECT * FROM tt_rep;
-- check table and functions
\d
\df
-- viewpoint from regress_rls_alice again
SET ROLE regress_rls_alice PASSWORD 'Ttest@123';
ALTER TABLE tt_rep FORCE ROW LEVEL SECURITY;
ALTER TABLE par_row_t1 FORCE ROW LEVEL SECURITY;
\d
SELECT * FROM tt_rep ORDER BY id;
SELECT * FROM par_row_t1 ORDER BY id;
-- check infinite recursion for rls
CREATE TABLE aa(a int);
CREATE TABLE bb(a int);
ALTER TABLE aa ENABLE ROW LEVEL SECURITY;
ALTER TABLE bb ENABLE ROW LEVEL SECURITY;
CREATE ROW LEVEL SECURITY POLICY aa_rls ON aa USING(EXISTS (SELECT a FROM bb));
-- create failed because of infinite recursion in rls policy
CREATE ROW LEVEL SECURITY POLICY bb_rls ON bb USING(EXISTS (SELECT a FROM aa));
ALTER TABLE aa DISABLE ROW LEVEL SECURITY;
-- create succeed because of aa disable row level security
CREATE ROW LEVEL SECURITY POLICY bb_rls ON bb USING(EXISTS (SELECT a FROM aa));
ALTER TABLE aa ENABLE ROW LEVEL SECURITY;
ALTER TABLE aa FORCE ROW LEVEL SECURITY;
ALTER TABLE bb FORCE ROW LEVEL SECURITY;
-- select failed because of infinite recursion in rls policy
SELECT * FROM aa;
ALTER ROW LEVEL SECURITY POLICY aa_rls ON aa USING(a > 10);
ALTER ROW LEVEL SECURITY POLICY aa_rls ON aa USING(EXISTS (SELECT a FROM bb LIMIT 1));
DROP ROW LEVEL SECURITY POLICY aa_rls ON aa;
CREATE ROW LEVEL SECURITY POLICY aa_rls ON aa AS RESTRICTIVE FOR SELECT TO PUBLIC USING(EXISTS(SELECT a FROM (SELECT a + 100 FROM aa WHERE a > 10 and a < 100 GROUP BY a HAVING count(*) >1)));
DROP TABLE aa CASCADE;
DROP TABLE bb CASCADE;
-- check any sublink
create table aa(aa_1 int, aa_2 int, rls int);
create policy aa_rls on aa using (rls = 1);
alter table aa enable row level security;
alter table aa force row level security;
create table bb(bb_1 int, bb_2 int, rls int);
create policy bb_rls on bb using (rls = 1);
alter table bb enable row level security;
alter table bb force row level security;
explain(costs off) select aa_1 from aa, bb where bb_1 = 1 and aa_1 > (select min(aa_1) from aa where aa_2 = bb_2 and aa_2 = 1);
-- clean environment
RESET ROLE;
DROP ROW LEVEL SECURITY POLICY t12 ON inlist_t1;
DROP ROW LEVEL SECURITY POLICY IF EXISTS t12 ON inlist_t1;
DROP SCHEMA regress_rls_schema CASCADE;
DROP USER IF EXISTS regress_rls_alice;
DROP USER IF EXISTS regress_rls_bob;
DROP USER IF EXISTS regress_rls_david;
DROP USER IF EXISTS regress_rls_peter;
DROP USER IF EXISTS regress_rls_admin;
DROP USER IF EXISTS regress_rls_single_user;
DROP ROLE IF EXISTS regress_rls_group1;
DROP ROLE IF EXISTS regress_rls_group2;
-- check again
SELECT COUNT(*) FROM pg_rlspolicies;
SELECT COUNT(*) FROM pg_depend WHERE classid = 3254 OR refclassid = 3254;
SELECT COUNT(*) FROM pg_shdepend WHERE classid = 3254 OR refclassid = 3254; | the_stack |
-- Allocates committed-use-discount (CUD) and sustained-use-discount (SUD)
-- credits and commitment costs from billing-account level down to
-- project/label level. Rendered by Airflow: every `{{ params.* }}` is a
-- Jinja template variable substituted before the query runs in BigQuery.
--
-- Safe division helper: returns 0 instead of an error when the denominator is 0.
CREATE TEMP FUNCTION
ratio(numerator float64,
denominator float64) AS (
IF
(denominator = 0,
0,
numerator / denominator));
(
WITH
-- Raw billing export, restricted to usage on/after 2018-09-20 (Pacific time).
billing_export_table AS (
SELECT
b.*
FROM
`{{ params.billing_export_table_name }}` b
WHERE
CAST(DATETIME(usage_start_time, "America/Los_Angeles") AS DATE) >= "2018-09-20"),
-- Per-project/label usage with CUD/SUD credit amounts, precomputed upstream.
project_label_credit_breakout AS (
SELECT
*
FROM
`{{ params.project_id }}.{{ params.corrected_dataset_id }}.{{ params.project_label_credit_breakout_table }}`),
-- One row per commitment id: the set of project ids it covers (either
-- resolved from folder ids via the billing export's ancestry, or listed
-- explicitly in the commitments CSV) plus a region/type/amount STRUCT.
project_commitments AS (
SELECT id,
commit_start_date,
commit_end_date,
ARRAY_AGG(DISTINCT TRIM(p)) project_ids,
STRUCT <region STRING,cud_type STRING,unit_type STRING,amount FLOAT64>
(ANY_VALUE(commitments_region),
ANY_VALUE(commitments_cud_type),
ANY_VALUE(commitments_unit_type),
ANY_VALUE(commitments_amount)) commitments
FROM
(
SELECT
TRIM(CAST(csv.id AS STRING)) AS id,
commit_start_date,
commit_end_date,
ARRAY_AGG(DISTINCT b.project_id ignore nulls) project_ids,
TRIM(csv.commitments_region) AS commitments_region,
TRIM(csv.commitments_cud_type) AS commitments_cud_type,
TRIM(csv.commitments_unit_type) AS commitments_unit_type,
csv.commitments_amount
FROM
(SELECT DISTINCT
project.id AS project_id,
project.ancestry_numbers AS ancestry_numbers
FROM
billing_export_table ) AS b,
`{{ params.project_id }}.{{ params.corrected_dataset_id }}.{{ params.temp_commitments_table_name }}` csv
LEFT JOIN UNNEST(SPLIT(folder_ids,",")) f
WHERE
regexp_contains(b.ancestry_numbers, f)
AND f not in ("")
GROUP BY 1,2,3,5,6,7,8
UNION ALL
SELECT
TRIM(cast(csv.id AS STRING)) AS id,
commit_start_date,
commit_end_date,
ANY_VALUE(SPLIT(TRIM(project_ids),",")) project_ids,
TRIM(csv.commitments_region) AS commitments_region,
TRIM(csv.commitments_cud_type) AS commitments_cud_type,
TRIM(csv.commitments_unit_type) AS commitments_unit_type,
csv.commitments_amount
FROM
`{{ params.project_id }}.{{ params.corrected_dataset_id }}.{{ params.temp_commitments_table_name }}` csv
WHERE
project_ids is not null
GROUP BY 1,2,3,5,6,7,8
), UNNEST(project_ids) p
GROUP BY 1,2,3
ORDER BY 1
),
-- Per project-group (commitment id) and usage date: purchased commitment
-- amount, how much of it was actually consumed, and the eligible usage/costs.
-- NOTE(review): the bare `commitments.*` references below resolve to the
-- `commitments` STRUCT column of `pc` (unambiguous in this join).
PG_purchased_commitments AS (
SELECT
pc.id AS pg_id,
usage_date,
pc.commit_start_date AS commit_start_date,
pc.commit_end_date AS commit_end_date,
p.region AS region,
p.cud_type AS cud_type,
p.unit_type AS unit_type,
ANY_VALUE(project_ids) AS project_ids,
ANY_VALUE(commitments.amount) AS PG_purchased_committments,
LEAST(ANY_VALUE(commitments.amount), SUM(p.usage_amount)) AS PG_purchased_committments_usage,
SUM(p.usage_amount) AS PG_all_eligible_usage,
SUM(p.commitment_cost) AS commitment_cost,
SUM(p.cud_credit_cost) AS cud_credit_cost,
SUM(p.sud_credit_cost) AS sud_credit_cost
FROM
project_label_credit_breakout p
JOIN
project_commitments pc
ON
p.project_id IN UNNEST(pc.project_ids)
AND p.region = commitments.region
AND p.cud_type = commitments.cud_type
AND p.unit_type = commitments.unit_type
AND usage_date BETWEEN commit_start_date
AND commit_end_date
GROUP BY 1,2,3,4,5,6,7),
-- Billing-account level totals per (date, region, cud_type, unit_type).
BA_credit_breakdown AS(
SELECT
p.usage_date AS usage_date,
p.region AS region,
p.cud_type AS cud_type,
p.unit_type AS unit_type,
SUM(p.usage_amount) AS eligible_usage,
SUM(p.usage_amount - p.cud_credit_usage_amount) AS sud_credit_usage_amount,
SUM(p.cud_credit_usage_amount) AS cud_credit_usage_amount,
SUM(p.commitment_cost) AS commitment_cost,
SUM(p.cud_credit_cost) AS cud_credit_cost,
SUM(p.sud_credit_cost) AS sud_credit_cost
FROM
project_label_credit_breakout p
GROUP BY 1, 2, 3, 4),
-- BA-level view of the *purchased* tranche: credit cost/usage scaled by the
-- share of BA cud credit usage covered by purchased commitments.
BA_purchased_credit_breakout_temp AS (
SELECT
pgc.usage_date AS usage_date,
pgc.region AS region,
pgc.cud_type AS cud_type,
pgc.unit_type AS unit_type,
SUM(pgc.PG_purchased_committments) AS BA_purchased_committments,
SUM(pgc.PG_purchased_committments_usage) AS BA_purchased_committments_usage,
SUM(pgc.PG_all_eligible_usage) AS BA_all_eligible_usage,
SUM(pgc.PG_purchased_committments_usage) AS BA_usage_amount,
ANY_VALUE(b.commitment_cost) as BA_commitment_cost_a,
--(ANY_VALUE(b.commitment_cost) * LEAST(1, ratio(SUM(pgc.PG_purchased_committments_usage),
-- ANY_VALUE(b.cud_credit_usage_amount)))) AS BA_commitment_cost_b,
(ANY_VALUE(b.cud_credit_cost) * LEAST(1, ratio(SUM(pgc.PG_purchased_committments_usage),
ANY_VALUE(b.cud_credit_usage_amount)))) AS BA_cud_credit_cost,
ANY_VALUE(b.cud_credit_usage_amount) *(LEAST(1, ratio(SUM(pgc.PG_purchased_committments_usage),
ANY_VALUE(b.cud_credit_usage_amount)))) AS BA_cud_credit_usage,
ANY_VALUE(b.sud_credit_cost) AS BA_sud_credit_cost
FROM
PG_purchased_commitments pgc
JOIN
BA_credit_breakdown b
ON
pgc.usage_date = b.usage_date
AND pgc.region = b.region
AND pgc.cud_type = b.cud_type
AND pgc.unit_type = b.unit_type
GROUP BY 1, 2, 3, 4),
-- BA-level view of the *unpurchased* tranche: whatever BA usage/credits are
-- left after subtracting the purchased tranche.
BA_unpurchased_credit_breakout AS (
SELECT
b.usage_date AS usage_date,
b.region AS region,
b.cud_type AS cud_type,
b.unit_type AS unit_type,
SUM(usage_amount) AS eligible_usage,
SUM(usage_amount) - ANY_VALUE(
IF
(pcb.BA_usage_amount IS NULL,
0,
pcb.BA_usage_amount)) AS BA_unpurchased_usage,
SUM(cud_credit_usage_amount) - ANY_VALUE(
IF
(pcb.BA_usage_amount IS NULL,
0,
pcb.BA_usage_amount)) AS BA_usage_amount,
0 as BA_commitment_cost_a,
--commitment cost * Ratio of its credit usage over total cud purchase commitment usage
ANY_VALUE(pcb.BA_commitment_cost_a) * ratio(
SUM(b.cud_credit_usage_amount) - ANY_VALUE(
IF (pcb.BA_cud_credit_usage is NULL,
0,
pcb.BA_cud_credit_usage)), ANY_VALUE(pcb.BA_purchased_committments)) as BA_commitment_cost_b,
SUM(b.cud_credit_cost) - ANY_VALUE(
IF
(pcb.BA_cud_credit_cost IS NULL,
0,
pcb.BA_cud_credit_cost)) AS BA_cud_credit_cost,
ANY_VALUE(pcb.BA_sud_credit_cost) AS BA_sud_credit_cost,
SUM(b.cud_credit_usage_amount) - ANY_VALUE(
IF (pcb.BA_cud_credit_usage is NULL,
0,
pcb.BA_cud_credit_usage)) AS cud_credit_usage_amount
FROM
project_label_credit_breakout b
LEFT JOIN
BA_purchased_credit_breakout_temp pcb
ON
pcb.usage_date = b.usage_date
AND pcb.region = b.region
AND pcb.cud_type = b.cud_type
AND pcb.unit_type = b.unit_type
GROUP BY 1, 2, 3, 4),
-- Purchased tranche with its commitment_cost_b derived as the BA total minus
-- the unpurchased tranche's share.
BA_purchased_credit_breakout AS (
SELECT ba_p.*,
b.commitment_cost - ba_u.BA_commitment_cost_b AS BA_commitment_cost_b
FROM
BA_purchased_credit_breakout_temp as ba_p
JOIN
BA_unpurchased_credit_breakout as ba_u
ON
ba_p.usage_date = ba_u.usage_date
AND ba_p.region = ba_u.region
AND ba_p.cud_type = ba_u.cud_type
AND ba_p.unit_type = ba_u.unit_type
JOIN
BA_credit_breakdown b
ON
ba_p.usage_date = b.usage_date
AND ba_p.region = b.region
AND ba_p.cud_type = b.cud_type
AND ba_p.unit_type = b.unit_type
),
-- Project-group share of the purchased BA tranche, scaled by each group's
-- fraction of commitments / committed usage.
PG_purchased_credit_breakout AS (
SELECT
pg_id,
pg.usage_date AS usage_date,
pg.commit_start_date AS commit_start_date,
pg.commit_end_date AS commit_end_date,
pg.region AS region,
pg.cud_type AS cud_type,
pg.unit_type AS unit_type,
pg.project_ids AS project_ids,
pg.PG_purchased_committments_usage AS PG_purchased_committments_usage,
pg.PG_all_eligible_usage AS PG_all_eligible_usage,
b.BA_usage_amount * ratio(PG_purchased_committments_usage,
b.BA_usage_amount) AS PG_usage_amount,
b.BA_commitment_cost_a * ratio(PG_purchased_committments,
b.BA_purchased_committments) AS PG_commitment_cost_a,
b.BA_commitment_cost_b * ratio(PG_purchased_committments,
b.BA_purchased_committments) AS PG_commitment_cost_b,
b.BA_cud_credit_cost * ratio(PG_purchased_committments_usage,
b.BA_usage_amount) AS PG_cud_credit_cost,
b.BA_sud_credit_cost * ratio(PG_purchased_committments_usage,
b.BA_usage_amount) AS PG_sud_credit_cost,
b.BA_cud_credit_usage * ratio(PG_purchased_committments_usage,
b.BA_usage_amount) AS PG_cud_credit_usage
FROM
PG_purchased_commitments AS pg
JOIN
BA_purchased_credit_breakout AS b
ON
pg.usage_date = b.usage_date
AND pg.region = b.region
AND pg.cud_type = b.cud_type
AND pg.unit_type = b.unit_type),
-- Project/label-level allocation of the purchased tranche, prorated by each
-- row's share of the project group's eligible usage.
final_cud_credits_data_purchased AS (
SELECT
billing_account_id,
usage_date,
service_id,
cost_type,
service_description,
region,
unit_type,
cud_type,
project_id,
project_name,
ancestry_numbers,
labels,
SUM(usage_amount) AS usage_amount,
SUM(P_sud_eligible_usage) AS P_sud_eligible_usage,
SUM(P_alloc_commitment_cost_a) AS P_alloc_commitment_cost_a,
SUM(P_alloc_commitment_cost_b) AS P_alloc_commitment_cost_b,
SUM(P_alloc_usage) AS P_alloc_usage,
SUM(P_alloc_cud_credit_cost) AS P_alloc_cud_credit_cost
FROM (
SELECT
p.billing_account_id,
p.usage_date AS usage_date,
p.service_id AS service_id,
p.cost_type AS cost_type,
p.service_description AS service_description,
p.region AS region,
p.unit_type AS unit_type,
p.cud_type AS cud_type,
p.project_id AS project_id,
p.project_name AS project_name,
p.ancestry_numbers AS ancestry_numbers,
labels,
p.usage_amount AS usage_amount,
-- we take the usage amount and remove the current usage, which gives us the potential
-- sud usage.
-- Note that this calculates sud credit for total remaining, so it does not take into account
-- some usage that will get cud allocated as part of unpurchased tranche. That is okay, because
-- we remove that part of cud usage from the sud_eligible_usage downstream
p.usage_amount - ratio(p.usage_amount, pgc.PG_all_eligible_usage)* pgc.PG_cud_credit_usage AS P_sud_eligible_usage,
ratio(p.usage_amount, pgc.PG_all_eligible_usage) * pgc.PG_purchased_committments_usage AS P_alloc_usage,
ratio(p.usage_amount, pgc.PG_all_eligible_usage) * pgc.PG_cud_credit_cost AS P_alloc_cud_credit_cost,
ratio(p.usage_amount, pgc.PG_all_eligible_usage) * pgc.PG_commitment_cost_a AS P_alloc_commitment_cost_a,
ratio(p.usage_amount, pgc.PG_all_eligible_usage) * pgc.PG_commitment_cost_b AS P_alloc_commitment_cost_b
FROM
project_label_credit_breakout AS p
JOIN
PG_purchased_credit_breakout AS pgc
ON
p.usage_date = pgc.usage_date
JOIN
BA_purchased_credit_breakout AS ba
ON
p.usage_date = ba.usage_date
JOIN BA_credit_breakdown AS bacb
ON p.usage_date = bacb.usage_date
AND p.region = bacb.region
AND p.unit_type = bacb.unit_type
AND p.cud_type = bacb.cud_type
AND p.region = ba.region
AND p.unit_type = ba.unit_type
AND p.region = pgc.region
AND p.cud_type = pgc.cud_type
AND p.unit_type = pgc.unit_type
AND p.project_id IN UNNEST(pgc.project_ids)
GROUP BY 1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18)
GROUP BY 1,2,3,4,5,6,7,8,9,10,11,12),
-- Project/label-level allocation of the unpurchased tranche; negative
-- P_sud_eligible_usage rows net out sud usage already granted above.
final_cud_credits_data_unpurchased AS (
SELECT
p.billing_account_id,
p.usage_date AS usage_date,
p.service_id AS service_id,
p.cost_type AS cost_type,
p.service_description AS service_description,
p.region AS region,
p.unit_type AS unit_type,
p.cud_type AS cud_type,
p.project_id AS project_id,
p.project_name as project_name,
p.ancestry_numbers as ancestry_numbers,
p.labels,
0 as usage_amount,
--If cud_credit_usage_amount for this unpurchased group is 0 and there was no sud eligible usage from purchased group,
--then all of usage_amount is sud eligible.
--If there is some credit usage in this unpurchased group, then we do the following,
-- If there was no eligible sud eligible usage from purchased group, then we just subtract the cud usage from the total usage amount.
-- If, however, there was some sud eligible usage from purchased group, then we just remove the previous cud usage amount from
-- current usage and multiply by -1 to get a negative number that can be removed from all sud eligible from purchased group
IF(b.cud_credit_usage_amount = 0,
if (fdp.P_sud_eligible_usage is null, p.usage_amount , 0),
IF (fdp.P_sud_eligible_usage is NOT NULL,
-1*(ratio((p.usage_amount - (fdp.usage_amount - fdp.P_sud_eligible_usage)),
BA_unpurchased_usage) * BA_usage_amount),
p.usage_amount - ( ratio(p.usage_amount, BA_unpurchased_usage) * BA_usage_amount ) ) ) as P_sud_eligible_usage,
0 AS P_alloc_commitment_cost_a,
ratio((p.usage_amount -
IF(fdp.P_alloc_usage IS NULL,
0,
fdp.P_alloc_usage)),
BA_unpurchased_usage) * b.BA_commitment_cost_b as P_alloc_commitment_cost_b,
0 AS P_alloc_usage,
ratio((p.usage_amount -
IF(fdp.P_alloc_usage IS NULL,
0,
fdp.P_alloc_usage)),
BA_unpurchased_usage) * BA_cud_credit_cost AS P_alloc_cud_credit_cost
FROM
project_label_credit_breakout AS p
JOIN
BA_unpurchased_credit_breakout AS b
ON
p.usage_date = b.usage_date
AND p.region = b.region
AND p.cud_type = b.cud_type
AND p.unit_type = b.unit_type
JOIN BA_credit_breakdown AS bacb
on p.usage_date = bacb.usage_date
AND p.region = bacb.region
AND p.unit_type = bacb.unit_type
AND p.cud_type = bacb.cud_type
LEFT JOIN
final_cud_credits_data_purchased fdp
ON
fdp.project_id = p.project_id
AND fdp.labels = p.labels
AND fdp.usage_date = p.usage_date
AND fdp.region = p.region
AND fdp.cud_type = p.cud_type
AND fdp.unit_type = p.unit_type
GROUP BY 1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18),
-- Union of both tranches, re-aggregated per project/label row.
final_cud_credits_data_all AS (
SELECT
billing_account_id,
usage_date AS usage_date,
service_id AS service_id,
cost_type AS cost_type,
service_description AS service_description,
region AS region,
unit_type AS unit_type,
cud_type AS cud_type,
project_id AS project_id,
project_name AS project_name,
ancestry_numbers AS ancestry_numbers,
labels AS labels,
sum(P_sud_eligible_usage) AS P_sud_eligible_usage,
sum(P_alloc_commitment_cost_a) AS P_alloc_commitment_cost_a,
sum(P_alloc_commitment_cost_b) AS P_alloc_commitment_cost_b,
sum(P_alloc_cud_credit_cost) AS P_alloc_cud_credit_cost
FROM (
SELECT
*
FROM
final_cud_credits_data_purchased
UNION ALL
SELECT
*
FROM
final_cud_credits_data_unpurchased
)
GROUP BY 1,2,3,4,5,6,7,8,9,10,11,12),
-- Final result: convert sud-eligible usage into sud credit cost and drop
-- all-zero rows.
final_data AS (
SELECT
*
FROM
(SELECT
p.billing_account_id,
p.usage_date AS usage_date,
p.service_id AS service_id,
p.cost_type AS cost_type,
p.service_description AS service_description,
p.region AS region,
p.unit_type AS unit_type,
p.cud_type AS cud_type,
p.project_id AS project_id,
p.project_name AS project_name,
p.ancestry_numbers AS ancestry_numbers,
p.labels,
p.P_sud_eligible_usage * ratio(bacb.sud_credit_cost , bacb.sud_credit_usage_amount) as P_alloc_sud_credit_cost,
p.P_alloc_cud_credit_cost,
p.P_alloc_commitment_cost_a,
p.P_alloc_commitment_cost_b
FROM
final_cud_credits_data_all AS p
JOIN BA_credit_breakdown AS bacb
ON p.usage_date = bacb.usage_date
AND p.region = bacb.region
AND p.unit_type = bacb.unit_type
AND p.cud_type = bacb.cud_type
)
WHERE P_alloc_sud_credit_cost <> 0
OR P_alloc_cud_credit_cost <> 0
OR P_alloc_commitment_cost_a <> 0
OR P_alloc_commitment_cost_b <> 0)
SELECT
*
FROM
final_data)
PRINT 'Inserting Purchasing.Suppliers'
GO
-- Simulation clock: every supplier row inserted below is valid from this
-- fixed start date up to the temporal-table "end of time" sentinel.
DECLARE @CurrentDateTime datetime2(7) = '20130101'
DECLARE @EndOfTime datetime2(7) = '99991231 23:59:59.9999999'
-- Bank-branch name, rebuilt per supplier from the randomly chosen city.
DECLARE @Bank NVARCHAR(50)
-- City Variables
-- Output holders for [DataLoadSimulation].[GetRandomCity]; reused by each
-- supplier block to fill delivery/postal city and phone area code.
DECLARE @myCityID AS INT
DECLARE @myCityName AS NVARCHAR(50)
DECLARE @myStateProvinceCode AS NVARCHAR(5)
DECLARE @myStateProvinceName AS NVARCHAR(50)
DECLARE @myAreaCode AS NVARCHAR(3)
/* A Datum Corporation ----------------------------------------------------------------*/
-- Each supplier block follows the same pattern: pick a random city (used for
-- both delivery and postal addresses and the phone area code), derive a
-- local bank-branch name, then insert one Purchasing.Suppliers row with
-- lookups resolving names to ids.
EXEC [DataLoadSimulation].[GetRandomCity]
@CityID = @myCityID OUTPUT
, @CityName = @myCityName OUTPUT
, @StateProvinceCode = @myStateProvinceCode OUTPUT
, @StateProvinceName = @myStateProvinceName OUTPUT
, @AreaCode = @myAreaCode OUTPUT
SET @Bank = 'Woodgrove Bank ' + @myCityName
INSERT Purchasing.Suppliers
( SupplierID, SupplierName
, SupplierCategoryID
, PrimaryContactPersonID
, AlternateContactPersonID
, DeliveryMethodID
, DeliveryCityID, PostalCityID
, SupplierReference
, BankAccountName, BankAccountBranch, BankAccountCode, BankAccountNumber, BankInternationalCode
, PaymentDays, InternalComments
, PhoneNumber, FaxNumber
, WebsiteURL
, DeliveryAddressLine1, DeliveryAddressLine2, DeliveryPostalCode, DeliveryLocation
, PostalAddressLine1, PostalAddressLine2, PostalPostalCode
, LastEditedBy, ValidFrom, ValidTo)
VALUES
( 1, 'A Datum Corporation'
, [DataLoadSimulation].[GetSupplierCategoryID] ('Novelty Goods Supplier')
, [DataLoadSimulation].[GetPersonID] ('Reio Kabin')
, [DataLoadSimulation].[GetPersonID] ('Oliver Kivi')
, [DataLoadSimulation].[GetDeliveryMethodID] ('Road Freight')
, @myCityID, @myCityID
, 'AA20384'
, 'A Datum Corporation', @Bank, '356981', '8575824136', '25986'
, 14, NULL
, '(' + @myAreaCode + ') 555-0100', '(' + @myAreaCode + ') 555-0101'
, 'http://www.adatum.com'
, 'Suite 10','183838 Southwest Boulevard','46077',NULL
, 'PO Box 1039', 'Surrey', '46077'
, 1, @CurrentDateTime, @EndOfTime)
/* Contoso, Ltd. ----------------------------------------------------------------------*/
EXEC [DataLoadSimulation].[GetRandomCity]
@CityID = @myCityID OUTPUT
, @CityName = @myCityName OUTPUT
, @StateProvinceCode = @myStateProvinceCode OUTPUT
, @StateProvinceName = @myStateProvinceName OUTPUT
, @AreaCode = @myAreaCode OUTPUT
SET @Bank = 'Woodgrove Bank ' + @myCityName
INSERT Purchasing.Suppliers
( SupplierID, SupplierName
, SupplierCategoryID
, PrimaryContactPersonID
, AlternateContactPersonID
, DeliveryMethodID
, DeliveryCityID, PostalCityID
, SupplierReference
, BankAccountName, BankAccountBranch, BankAccountCode, BankAccountNumber, BankInternationalCode
, PaymentDays, InternalComments
, PhoneNumber, FaxNumber
, WebsiteURL
, DeliveryAddressLine1, DeliveryAddressLine2, DeliveryPostalCode, DeliveryLocation
, PostalAddressLine1, PostalAddressLine2, PostalPostalCode
, LastEditedBy, ValidFrom, ValidTo)
VALUES
( 2, 'Contoso, Ltd.'
, [DataLoadSimulation].[GetSupplierCategoryID] ('Novelty Goods Supplier')
, [DataLoadSimulation].[GetPersonID] ('Hanna Mihhailov')
, [DataLoadSimulation].[GetPersonID] ('Paulus Lippmaa')
, [DataLoadSimulation].[GetDeliveryMethodID] ('Refrigerated Road Freight')
, @myCityID, @myCityID
, 'B2084020'
, 'Contoso Ltd', @Bank, '358698', '4587965215', '25868'
, 7, NULL
, '(' + @myAreaCode + ') 555-0100', '(' + @myAreaCode + ') 555-0101'
, 'http://www.contoso.com'
, 'Unit 2', '2934 Night Road','98253',NULL
, 'PO Box 1012', 'Jolimont', '98253'
, 1, @CurrentDateTime, @EndOfTime
)
/* Consolidated Messenger -------------------------------------------------------------*/
-- Pick a random city for the addresses/area code and derive the local bank
-- branch name, then insert supplier 3.
EXEC [DataLoadSimulation].[GetRandomCity]
@CityID = @myCityID OUTPUT
, @CityName = @myCityName OUTPUT
, @StateProvinceCode = @myStateProvinceCode OUTPUT
, @StateProvinceName = @myStateProvinceName OUTPUT
, @AreaCode = @myAreaCode OUTPUT
SET @Bank = 'Woodgrove Bank ' + @myCityName
INSERT Purchasing.Suppliers
( SupplierID, SupplierName
, SupplierCategoryID
, PrimaryContactPersonID
, AlternateContactPersonID
, DeliveryMethodID
, DeliveryCityID, PostalCityID
, SupplierReference
, BankAccountName, BankAccountBranch, BankAccountCode, BankAccountNumber, BankInternationalCode
, PaymentDays, InternalComments
, PhoneNumber, FaxNumber
, WebsiteURL
, DeliveryAddressLine1, DeliveryAddressLine2, DeliveryPostalCode, DeliveryLocation
, PostalAddressLine1, PostalAddressLine2, PostalPostalCode
, LastEditedBy, ValidFrom, ValidTo)
VALUES
( 3, 'Consolidated Messenger'
, [DataLoadSimulation].[GetSupplierCategoryID] ('Courier')
, [DataLoadSimulation].[GetPersonID] ('Kerstin Parn')
, [DataLoadSimulation].[GetPersonID] ('Helen Ahven')
-- FIX: the original called GetDeliveryMethodID with the literal string
-- 'NULL', which matches no delivery method and so yielded NULL anyway.
-- Insert NULL explicitly to make the intent clear.
-- (TODO(review): confirm no delivery method is actually named 'NULL'.)
, NULL
, @myCityID, @myCityID
, '209340283'
, 'Consolidated Messenger', @Bank, '354269','3254872158','45698'
, 30, NULL
, '(' + @myAreaCode + ') 555-0100','(' + @myAreaCode + ') 555-0101'
,'http://www.consolidatedmessenger.com'
, '','894 Market Day Street','94101',NULL
, 'PO Box 1014','West Mont','94101'
, 1, @CurrentDateTime, @EndOfTime
)
/* Fabrikam, Inc. ---------------------------------------------------------------------*/
-- Random city feeds the addresses, area code and bank-branch name for this
-- supplier; lookups resolve category/person/delivery-method names to ids.
EXEC [DataLoadSimulation].[GetRandomCity]
@CityID = @myCityID OUTPUT
, @CityName = @myCityName OUTPUT
, @StateProvinceCode = @myStateProvinceCode OUTPUT
, @StateProvinceName = @myStateProvinceName OUTPUT
, @AreaCode = @myAreaCode OUTPUT
SET @Bank = 'Woodgrove Bank ' + @myCityName
INSERT Purchasing.Suppliers
( SupplierID, SupplierName
, SupplierCategoryID
, PrimaryContactPersonID
, AlternateContactPersonID
, DeliveryMethodID
, DeliveryCityID, PostalCityID
, SupplierReference
, BankAccountName, BankAccountBranch, BankAccountCode, BankAccountNumber, BankInternationalCode
, PaymentDays, InternalComments
, PhoneNumber, FaxNumber
, WebsiteURL
, DeliveryAddressLine1, DeliveryAddressLine2, DeliveryPostalCode, DeliveryLocation
, PostalAddressLine1, PostalAddressLine2, PostalPostalCode
, LastEditedBy, ValidFrom, ValidTo)
VALUES
( 4, 'Fabrikam, Inc.'
, [DataLoadSimulation].[GetSupplierCategoryID] ('Clothing Supplier')
, [DataLoadSimulation].[GetPersonID] ('Bill Lawson')
, [DataLoadSimulation].[GetPersonID] ('Helen Moore')
, [DataLoadSimulation].[GetDeliveryMethodID] ('Road Freight')
, @myCityID, @myCityID
, '293092'
, 'Fabrikam Inc', @Bank, '789568', '4125863879', '12546'
, 30, NULL
, '(' + @myAreaCode + ') 555-0104', '(' + @myAreaCode + ') 555-0108'
, 'http://www.fabrikam.com'
, 'Level 2', '393999 Woodberg Road', '40351', NULL
, 'PO Box 301', 'Eaglemont', '40351'
, 1, @CurrentDateTime, @EndOfTime
)
/* Graphic Design Institute -----------------------------------------------------------*/
EXEC [DataLoadSimulation].[GetRandomCity]
@CityID = @myCityID OUTPUT
, @CityName = @myCityName OUTPUT
, @StateProvinceCode = @myStateProvinceCode OUTPUT
, @StateProvinceName = @myStateProvinceName OUTPUT
, @AreaCode = @myAreaCode OUTPUT
SET @Bank = 'Woodgrove Bank ' + @myCityName
INSERT Purchasing.Suppliers
( SupplierID, SupplierName
, SupplierCategoryID
, PrimaryContactPersonID
, AlternateContactPersonID
, DeliveryMethodID
, DeliveryCityID, PostalCityID
, SupplierReference
, BankAccountName, BankAccountBranch, BankAccountCode, BankAccountNumber, BankInternationalCode
, PaymentDays, InternalComments
, PhoneNumber, FaxNumber
, WebsiteURL
, DeliveryAddressLine1, DeliveryAddressLine2, DeliveryPostalCode, DeliveryLocation
, PostalAddressLine1, PostalAddressLine2, PostalPostalCode
, LastEditedBy, ValidFrom, ValidTo)
VALUES
( 5, 'Graphic Design Institute'
, [DataLoadSimulation].[GetSupplierCategoryID] ('Novelty Goods Supplier')
, [DataLoadSimulation].[GetPersonID] ('Penny Buck')
, [DataLoadSimulation].[GetPersonID] ('Donna Smith')
, [DataLoadSimulation].[GetDeliveryMethodID] ('Refrigerated Air Freight')
, @myCityID, @myCityID
, '08803922'
, 'Graphic Design Institute', @Bank, '563215', '1025869354', '32587'
, 14, NULL
, '(' + @myAreaCode + ') 555-0105', '(' + @myAreaCode + ') 555-0106'
, 'http://www.graphicdesigninstitute.com'
, '', '45th Street', '64847', NULL
, 'PO Box 393', 'Willow', '64847'
, 1, @CurrentDateTime, @EndOfTime
)
/* Humongous Insurance ----------------------------------------------------------------*/
-- Pick a random city for the addresses/area code and derive the local bank
-- branch name, then insert supplier 6.
EXEC [DataLoadSimulation].[GetRandomCity]
@CityID = @myCityID OUTPUT
, @CityName = @myCityName OUTPUT
, @StateProvinceCode = @myStateProvinceCode OUTPUT
, @StateProvinceName = @myStateProvinceName OUTPUT
, @AreaCode = @myAreaCode OUTPUT
SET @Bank = 'Woodgrove Bank ' + @myCityName
INSERT Purchasing.Suppliers
( SupplierID, SupplierName
, SupplierCategoryID
, PrimaryContactPersonID
, AlternateContactPersonID
, DeliveryMethodID
, DeliveryCityID, PostalCityID
, SupplierReference
, BankAccountName, BankAccountBranch, BankAccountCode, BankAccountNumber, BankInternationalCode
, PaymentDays, InternalComments
, PhoneNumber, FaxNumber
, WebsiteURL
, DeliveryAddressLine1, DeliveryAddressLine2, DeliveryPostalCode, DeliveryLocation
, PostalAddressLine1, PostalAddressLine2, PostalPostalCode
, LastEditedBy, ValidFrom, ValidTo)
VALUES
( 6, 'Humongous Insurance'
, [DataLoadSimulation].[GetSupplierCategoryID] ('Insurance Services Supplier')
, [DataLoadSimulation].[GetPersonID] ('Madelaine Cartier')
, [DataLoadSimulation].[GetPersonID] ('Annette Talon')
-- FIX: the original called GetDeliveryMethodID with the literal string
-- 'NULL', which matches no delivery method and so yielded NULL anyway
-- (an insurance supplier delivers no goods). Insert NULL explicitly.
-- (TODO(review): confirm no delivery method is actually named 'NULL'.)
, NULL
, @myCityID, @myCityID
, '082420938'
, 'Humongous Insurance', @Bank, '325001', '2569874521', '32569'
, 14, NULL
, '(' + @myAreaCode + ') 555-0105', '(' + @myAreaCode + ') 555-0100'
, 'http://www.humongousinsurance.com'
, '', '9893 Mount Norris Road', '37770', NULL
, 'PO Box 94829', 'Boxville', '37770'
, 1, @CurrentDateTime, @EndOfTime
)
/* Litware, Inc. ----------------------------------------------------------------------*/
-- Random city feeds the addresses, area code and bank-branch name for this
-- supplier; lookups resolve category/person/delivery-method names to ids.
EXEC [DataLoadSimulation].[GetRandomCity]
@CityID = @myCityID OUTPUT
, @CityName = @myCityName OUTPUT
, @StateProvinceCode = @myStateProvinceCode OUTPUT
, @StateProvinceName = @myStateProvinceName OUTPUT
, @AreaCode = @myAreaCode OUTPUT
SET @Bank = 'Woodgrove Bank ' + @myCityName
INSERT Purchasing.Suppliers
( SupplierID, SupplierName
, SupplierCategoryID
, PrimaryContactPersonID
, AlternateContactPersonID
, DeliveryMethodID
, DeliveryCityID, PostalCityID
, SupplierReference
, BankAccountName, BankAccountBranch, BankAccountCode, BankAccountNumber, BankInternationalCode
, PaymentDays, InternalComments
, PhoneNumber, FaxNumber
, WebsiteURL
, DeliveryAddressLine1, DeliveryAddressLine2, DeliveryPostalCode, DeliveryLocation
, PostalAddressLine1, PostalAddressLine2, PostalPostalCode
, LastEditedBy, ValidFrom, ValidTo)
VALUES
( 7, 'Litware, Inc.'
, [DataLoadSimulation].[GetSupplierCategoryID] ('Packaging Supplier')
, [DataLoadSimulation].[GetPersonID] ('Elias Myllari')
, [DataLoadSimulation].[GetPersonID] ('Vilma Niva')
, [DataLoadSimulation].[GetDeliveryMethodID] ('Courier')
, @myCityID, @myCityID
, 'BC0280982'
, 'Litware Inc', @Bank, '358769', '3256896325', '21445'
, 30, NULL
, '(' + @myAreaCode + ') 555-0108', '(' + @myAreaCode + ') 555-0104'
, 'http://www.litwareinc.com'
, 'Level 3', '19 Le Church Street', '95245', NULL
, 'PO Box 20290', 'Jackson', '95245'
, 1, @CurrentDateTime, @EndOfTime
)
/* Lucerne Publishing -----------------------------------------------------------------*/
EXEC [DataLoadSimulation].[GetRandomCity]
@CityID = @myCityID OUTPUT
, @CityName = @myCityName OUTPUT
, @StateProvinceCode = @myStateProvinceCode OUTPUT
, @StateProvinceName = @myStateProvinceName OUTPUT
, @AreaCode = @myAreaCode OUTPUT
SET @Bank = 'Woodgrove Bank ' + @myCityName
INSERT Purchasing.Suppliers
( SupplierID, SupplierName
, SupplierCategoryID
, PrimaryContactPersonID
, AlternateContactPersonID
, DeliveryMethodID
, DeliveryCityID, PostalCityID
, SupplierReference
, BankAccountName, BankAccountBranch, BankAccountCode, BankAccountNumber, BankInternationalCode
, PaymentDays, InternalComments
, PhoneNumber, FaxNumber
, WebsiteURL
, DeliveryAddressLine1, DeliveryAddressLine2, DeliveryPostalCode, DeliveryLocation
, PostalAddressLine1, PostalAddressLine2, PostalPostalCode
, LastEditedBy, ValidFrom, ValidTo)
VALUES
( 8, 'Lucerne Publishing'
, [DataLoadSimulation].[GetSupplierCategoryID] ('Novelty Goods Supplier')
, [DataLoadSimulation].[GetPersonID] ('Prem Prabhu')
, [DataLoadSimulation].[GetPersonID] ('Sunita Jadhav')
, [DataLoadSimulation].[GetDeliveryMethodID] ('Refrigerated Air Freight')
, @myCityID, @myCityID
, 'JQ082304802'
, 'Lucerne Publishing', @Bank, '654789', '3254123658', '21569'
, 30, NULL
, '(' + @myAreaCode + ') 555-0103', '(' + @myAreaCode + ') 555-0105'
, 'http://www.lucernepublishing.com'
, 'Suite 34', '949482 Miller Boulevard', '37659', NULL
, 'PO Box 8747', 'Westerfold', '37659'
, 1, @CurrentDateTime, @EndOfTime
)
/* Nod Publishers ---------------------------------------------------------------------*/
-- NOTE(review): the header comment here previously read "Lucerne Publishing"
-- (a copy-paste of the preceding block); the row inserted is Nod Publishers.
EXEC [DataLoadSimulation].[GetRandomCity]
@CityID = @myCityID OUTPUT
, @CityName = @myCityName OUTPUT
, @StateProvinceCode = @myStateProvinceCode OUTPUT
, @StateProvinceName = @myStateProvinceName OUTPUT
, @AreaCode = @myAreaCode OUTPUT
SET @Bank = 'Woodgrove Bank ' + @myCityName
INSERT Purchasing.Suppliers
( SupplierID, SupplierName
, SupplierCategoryID
, PrimaryContactPersonID
, AlternateContactPersonID
, DeliveryMethodID
, DeliveryCityID, PostalCityID
, SupplierReference
, BankAccountName, BankAccountBranch, BankAccountCode, BankAccountNumber, BankInternationalCode
, PaymentDays, InternalComments
, PhoneNumber, FaxNumber
, WebsiteURL
, DeliveryAddressLine1, DeliveryAddressLine2, DeliveryPostalCode, DeliveryLocation
, PostalAddressLine1, PostalAddressLine2, PostalPostalCode
, LastEditedBy, ValidFrom, ValidTo)
VALUES
( 9, 'Nod Publishers'
, [DataLoadSimulation].[GetSupplierCategoryID] ('Novelty Goods Supplier')
, [DataLoadSimulation].[GetPersonID] ('Marcos Costa')
, [DataLoadSimulation].[GetPersonID] ('Matheus Oliveira')
, [DataLoadSimulation].[GetDeliveryMethodID] ('Refrigerated Air Freight')
, @myCityID, @myCityID
, 'GL08029802'
, 'Nod Publishers', @Bank, '365985', '2021545878', '48758'
, 7, 'Marcos is not in on Mondays'
, '(' + @myAreaCode + ') 555-0100', '(' + @myAreaCode + ') 555-0101'
, 'http://www.nodpublishers.com'
, 'Level 1', '389 King Street', '27906', NULL
, 'PO Box 3390', 'Anderson', '27906'
, 1, @CurrentDateTime, @EndOfTime
)
/* Northwind Electric Cars ------------------------------------------------------------*/
-- Supplier 10. Random city reused for delivery and postal addresses.
EXEC [DataLoadSimulation].[GetRandomCity]
@CityID = @myCityID OUTPUT
, @CityName = @myCityName OUTPUT
, @StateProvinceCode = @myStateProvinceCode OUTPUT
, @StateProvinceName = @myStateProvinceName OUTPUT
, @AreaCode = @myAreaCode OUTPUT
SET @Bank = 'Woodgrove Bank ' + @myCityName
INSERT Purchasing.Suppliers
( SupplierID, SupplierName
, SupplierCategoryID
, PrimaryContactPersonID
, AlternateContactPersonID
, DeliveryMethodID
, DeliveryCityID, PostalCityID
, SupplierReference
, BankAccountName, BankAccountBranch, BankAccountCode, BankAccountNumber, BankInternationalCode
, PaymentDays, InternalComments
, PhoneNumber, FaxNumber
, WebsiteURL
, DeliveryAddressLine1, DeliveryAddressLine2, DeliveryPostalCode, DeliveryLocation
, PostalAddressLine1, PostalAddressLine2, PostalPostalCode
, LastEditedBy, ValidFrom, ValidTo)
VALUES
(10, 'Northwind Electric Cars'
, [DataLoadSimulation].[GetSupplierCategoryID] ('Toy Supplier')
, [DataLoadSimulation].[GetPersonID] ('Eliza Soderberg')
, [DataLoadSimulation].[GetPersonID] ('Sara Karlsson')
, [DataLoadSimulation].[GetDeliveryMethodID] ('Air Freight')
, @myCityID, @myCityID
, 'ML0300202'
, 'Northwind Electric Cars', @Bank, '325447', '3258786987', '36214'
, 30, NULL
, '(' + @myAreaCode + ') 555-0105', '(' + @myAreaCode + ') 555-0104'
, 'http://www.northwindelectriccars.com'
, '', '440 New Road', '07860', NULL
, 'PO Box 30920', 'Arlington', '07860'
, 1, @CurrentDateTime, @EndOfTime
)
/* Trey Research ----------------------------------------------------------------------*/
-- Supplier 11 (services supplier; note no physical delivery method by intent?).
EXEC [DataLoadSimulation].[GetRandomCity]
@CityID = @myCityID OUTPUT
, @CityName = @myCityName OUTPUT
, @StateProvinceCode = @myStateProvinceCode OUTPUT
, @StateProvinceName = @myStateProvinceName OUTPUT
, @AreaCode = @myAreaCode OUTPUT
SET @Bank = 'Woodgrove Bank ' + @myCityName
INSERT Purchasing.Suppliers
( SupplierID, SupplierName
, SupplierCategoryID
, PrimaryContactPersonID
, AlternateContactPersonID
, DeliveryMethodID
, DeliveryCityID, PostalCityID
, SupplierReference
, BankAccountName, BankAccountBranch, BankAccountCode, BankAccountNumber, BankInternationalCode
, PaymentDays, InternalComments
, PhoneNumber, FaxNumber
, WebsiteURL
, DeliveryAddressLine1, DeliveryAddressLine2, DeliveryPostalCode, DeliveryLocation
, PostalAddressLine1, PostalAddressLine2, PostalPostalCode
, LastEditedBy, ValidFrom, ValidTo)
VALUES
(11, 'Trey Research'
, [DataLoadSimulation].[GetSupplierCategoryID] ('Marketing Services Supplier')
, [DataLoadSimulation].[GetPersonID] ('Donald Jones')
, [DataLoadSimulation].[GetPersonID] ('Sharon Graham')
-- NOTE(review): the string literal 'NULL' is passed here, not SQL NULL. Unless a
-- delivery method is actually named 'NULL', the lookup presumably yields NULL —
-- confirm this is intentional.
, [DataLoadSimulation].[GetDeliveryMethodID] ('NULL')
, @myCityID, @myCityID
, '082304822'
, 'Trey Research', @Bank, '658968', '1254785321', '56958'
, 7, NULL
, '(' + @myAreaCode + ') 555-0103', '(' + @myAreaCode + ') 555-0101'
, 'http://www.treyresearch.net'
, 'Level 43', '9401 Polar Avenue', '57543', NULL
, 'PO Box 595', 'Port Fairy', '57543'
, 1, @CurrentDateTime, @EndOfTime
)
/* The Phone Company ------------------------------------------------------------------*/
-- Supplier 12. Random city reused for delivery and postal addresses.
EXEC [DataLoadSimulation].[GetRandomCity]
@CityID = @myCityID OUTPUT
, @CityName = @myCityName OUTPUT
, @StateProvinceCode = @myStateProvinceCode OUTPUT
, @StateProvinceName = @myStateProvinceName OUTPUT
, @AreaCode = @myAreaCode OUTPUT
SET @Bank = 'Woodgrove Bank ' + @myCityName
INSERT Purchasing.Suppliers
( SupplierID, SupplierName
, SupplierCategoryID
, PrimaryContactPersonID
, AlternateContactPersonID
, DeliveryMethodID
, DeliveryCityID, PostalCityID
, SupplierReference
, BankAccountName, BankAccountBranch, BankAccountCode, BankAccountNumber, BankInternationalCode
, PaymentDays, InternalComments
, PhoneNumber, FaxNumber
, WebsiteURL
, DeliveryAddressLine1, DeliveryAddressLine2, DeliveryPostalCode, DeliveryLocation
, PostalAddressLine1, PostalAddressLine2, PostalPostalCode
, LastEditedBy, ValidFrom, ValidTo)
VALUES
(12, 'The Phone Company'
, [DataLoadSimulation].[GetSupplierCategoryID] ('Novelty Goods Supplier')
, [DataLoadSimulation].[GetPersonID] ('Hai Dam')
, [DataLoadSimulation].[GetPersonID] ('Thanh Dinh')
, [DataLoadSimulation].[GetDeliveryMethodID] ('Road Freight')
, @myCityID, @myCityID
, '237408032'
, 'The Phone Company', @Bank, '214568', '7896236589', '25478'
, 30, NULL
, '(' + @myAreaCode + ') 555-0105', '(' + @myAreaCode + ') 555-0105'
, 'http://www.thephone-company.com'
, 'Level 83', '339 Toorak Road', '56732', NULL
, 'PO Box 3837', 'Ferny Wood', '56732'
, 1, @CurrentDateTime, @EndOfTime
)
/* Woodgrove Bank ---------------------------------------------------------------------*/
-- Supplier 13 (financial services supplier).
EXEC [DataLoadSimulation].[GetRandomCity]
@CityID = @myCityID OUTPUT
, @CityName = @myCityName OUTPUT
, @StateProvinceCode = @myStateProvinceCode OUTPUT
, @StateProvinceName = @myStateProvinceName OUTPUT
, @AreaCode = @myAreaCode OUTPUT
SET @Bank = 'Woodgrove Bank ' + @myCityName
INSERT Purchasing.Suppliers
( SupplierID, SupplierName
, SupplierCategoryID
, PrimaryContactPersonID
, AlternateContactPersonID
, DeliveryMethodID
, DeliveryCityID, PostalCityID
, SupplierReference
, BankAccountName, BankAccountBranch, BankAccountCode, BankAccountNumber, BankInternationalCode
, PaymentDays, InternalComments
, PhoneNumber, FaxNumber
, WebsiteURL
, DeliveryAddressLine1, DeliveryAddressLine2, DeliveryPostalCode, DeliveryLocation
, PostalAddressLine1, PostalAddressLine2, PostalPostalCode
, LastEditedBy, ValidFrom, ValidTo)
VALUES
(13, 'Woodgrove Bank'
, [DataLoadSimulation].[GetSupplierCategoryID] ('Financial Services Supplier')
, [DataLoadSimulation].[GetPersonID] ('Hubert Helms')
, [DataLoadSimulation].[GetPersonID] ('Donald Small')
-- NOTE(review): literal string 'NULL' (not SQL NULL) — same concern as the
-- Trey Research row; confirm intent.
, [DataLoadSimulation].[GetDeliveryMethodID] ('NULL')
, @myCityID, @myCityID
, '028034202'
, 'Woodgrove Bank', @Bank, '325698', '2147825698', '65893'
, 7, 'Only speak to Donald if Hubert really is not available'
, '(' + @myAreaCode + ') 555-0103', '(' + @myAreaCode + ') 555-0107'
, 'http://www.woodgrovebank.com'
, 'Level 3', '8488 Vienna Boulevard', '94101', NULL
, 'PO Box 2390', 'Canterbury', '94101'
, 1, @CurrentDateTime, @EndOfTime
)
-- Backfill each supplier's geographic point from its delivery city, and give
-- ValidFrom a small random offset past @CurrentDateTime.
-- Fix: RAND() without a seed is evaluated once per statement in SQL Server, so
-- every row received the SAME offset. Seeding with CHECKSUM(NEWID()) forces a
-- fresh value per row, giving each supplier an independent 1-5 minute offset.
UPDATE s
SET s.DeliveryLocation = c.[Location]
, s.[ValidFrom] = DATEADD(minute, CEILING(RAND(CHECKSUM(NEWID())) * 5), @CurrentDateTime)
FROM Purchasing.Suppliers AS s
INNER JOIN [Application].Cities AS c
ON s.DeliveryCityID = c.CityID
GO
-- Reset the _rrule schema from scratch. DROP SCHEMA ... CASCADE also removes
-- the TEXT<->RRULE casts (they depend on the _rrule.RRULE type), so separate
-- DROP CAST statements are unnecessary — and referencing _rrule.RRULE after
-- the schema is gone raises "type does not exist" on a fresh database.
DROP SCHEMA IF EXISTS _rrule CASCADE;
CREATE SCHEMA _rrule;
-- Supported recurrence frequencies (subset of RFC 5545 FREQ; SECONDLY,
-- MINUTELY and HOURLY are not represented).
CREATE TYPE _rrule.FREQ AS ENUM (
  'YEARLY',
  'MONTHLY',
  'WEEKLY',
  'DAILY'
);
-- Two-letter weekday codes as used by RFC 5545 (BYDAY / WKST values).
CREATE TYPE _rrule.DAY AS ENUM (
  'MO',
  'TU',
  'WE',
  'TH',
  'FR',
  'SA',
  'SU'
);
-- Composite representation of a single RFC 5545 recurrence rule. Used as a
-- row type throughout this schema; the CHECK constraints mirror the value
-- ranges RFC 5545 allows for each BY* part.
-- NOTE(review): "bysecond" is capped below 60, which excludes the leap-second
-- value 60 that RFC 5545 permits — confirm whether that is deliberate.
CREATE TABLE _rrule.RRULE (
  "freq" _rrule.FREQ NOT NULL,
  "interval" INTEGER DEFAULT 1 NOT NULL CHECK(0 < "interval"),
  "count" INTEGER,
  "until" TIMESTAMP,
  "bysecond" INTEGER[] CHECK (0 <= ALL("bysecond") AND 60 > ALL("bysecond")),
  "byminute" INTEGER[] CHECK (0 <= ALL("byminute") AND 60 > ALL("byminute")),
  "byhour" INTEGER[] CHECK (0 <= ALL("byhour") AND 24 > ALL("byhour")),
  "byday" _rrule.DAY[],
  "bymonthday" INTEGER[] CHECK (31 >= ALL("bymonthday") AND 0 <> ALL("bymonthday") AND -31 <= ALL("bymonthday")),
  "byyearday" INTEGER[] CHECK (366 >= ALL("byyearday") AND 0 <> ALL("byyearday") AND -366 <= ALL("byyearday")),
  "byweekno" INTEGER[] CHECK (53 >= ALL("byweekno") AND 0 <> ALL("byweekno") AND -53 <= ALL("byweekno")),
  "bymonth" INTEGER[] CHECK (0 < ALL("bymonth") AND 12 >= ALL("bymonth")),
  "bysetpos" INTEGER[] CHECK(366 >= ALL("bysetpos") AND 0 <> ALL("bysetpos") AND -366 <= ALL("bysetpos")),
  "wkst" _rrule.DAY,
  CONSTRAINT freq_yearly_if_byweekno CHECK("freq" = 'YEARLY' OR "byweekno" IS NULL)
);
-- A full recurrence set: one rule plus one exclusion rule, with explicit
-- included ("rdate") and excluded ("exdate") timestamps.
CREATE TABLE _rrule.RRULESET (
  "dtstart" TIMESTAMP NOT NULL,
  "dtend" TIMESTAMP,
  "rrule" _rrule.RRULE,
  "exrule" _rrule.RRULE,
  "rdate" TIMESTAMP[],
  "exdate" TIMESTAMP[]
);
-- An INTERVAL decomposed into its three independently stored components.
CREATE TYPE _rrule.exploded_interval AS (
  "months" INTEGER,
  "days" INTEGER,
  "seconds" INTEGER
);

-- Split an INTERVAL into total months, days, and seconds so the components
-- can be compared field-by-field (see _rrule.interval_contains).
CREATE OR REPLACE FUNCTION _rrule.explode_interval(INTERVAL)
RETURNS _rrule.EXPLODED_INTERVAL AS $$
  SELECT (
    EXTRACT(YEAR FROM $1) * 12 + EXTRACT(MONTH FROM $1),
    EXTRACT(DAY FROM $1),
    EXTRACT(HOUR FROM $1) * 3600 + EXTRACT(MINUTE FROM $1) * 60 + EXTRACT(SECOND FROM $1)
  )::_rrule.EXPLODED_INTERVAL;
$$ LANGUAGE SQL IMMUTABLE STRICT;
-- Integer "how many times does $2 fit exactly into $1":
--   both zero        -> NULL (indeterminate)
--   exactly one zero -> 0
--   not divisible    -> 0
--   otherwise        -> the exact quotient $1 / $2
CREATE OR REPLACE FUNCTION _rrule.factor(INTEGER, INTEGER)
RETURNS INTEGER AS $$
  SELECT CASE
    WHEN $1 = 0 AND $2 = 0 THEN NULL
    WHEN $1 = 0 OR $2 = 0 THEN 0
    -- The zero guards above make this modulo safe from division by zero.
    WHEN ($1 % $2) = 0 THEN $1 / $2
    ELSE 0
  END;
$$ LANGUAGE SQL IMMUTABLE STRICT;
-- TRUE when interval $1 is an exact integer multiple of interval $2 on every
-- non-zero component, with the same multiplier across components. Relies on
-- COALESCE(..., TRUE) because _rrule.factor returns NULL for 0/0 components,
-- which should not veto containment.
CREATE OR REPLACE FUNCTION _rrule.interval_contains(INTERVAL, INTERVAL)
RETURNS BOOLEAN AS $$
  -- Any fields that have 0 must have zero in each.
  WITH factors AS (
    SELECT
      _rrule.factor(a.months, b.months) AS months,
      _rrule.factor(a.days, b.days) AS days,
      _rrule.factor(a.seconds, b.seconds) AS seconds
    FROM _rrule.explode_interval($2) a, _rrule.explode_interval($1) b
  )
  SELECT
    COALESCE(months <> 0, TRUE)
  AND
    COALESCE(days <> 0, TRUE)
  AND
    COALESCE(seconds <> 0, TRUE)
  AND
    COALESCE(months = days, TRUE)
  AND
    COALESCE(months = seconds, TRUE)
  FROM factors;
-- parse_line: extract the ';'-separated payload fragments of the iCalendar
-- content line whose name is "marker" (e.g. 'RRULE', 'DTSTART') from a
-- multi-line input, as a set of 'KEY=VALUE' text rows.
$$ LANGUAGE SQL IMMUTABLE STRICT;CREATE OR REPLACE FUNCTION _rrule.parse_line (input TEXT, marker TEXT)
RETURNS SETOF TEXT AS $$
  -- Clear spaces at the front of the lines
  WITH A4 as (SELECT regexp_replace(input, '^\s*', '', 'ng') "r"),
  -- Clear all lines except the ones starting with marker
  A5 as (SELECT regexp_replace(A4."r", '^(?!' || marker || ').*?$', '', 'ng') "r" FROM A4),
  -- Replace carriage returns with blank space.
  A10 as (SELECT regexp_replace(A5."r", E'[\\n\\r]+', '', 'g') "r" FROM A5),
  -- Remove marker prefix.
  A15 as (SELECT regexp_replace(A10."r", marker || ':(.*)$', '\1') "r" FROM A10),
  -- Trim
  A17 as (SELECT trim(A15."r") "r" FROM A15),
  -- Split each key-value pair into a row in a table
  A20 as (SELECT regexp_split_to_table(A17."r", ';') "r" FROM A17)
  -- Split each key value pair into an array, e.g. {'FREQ', 'DAILY'}
  SELECT "r" AS "y"
  FROM A20
  WHERE "r" != '';
$$ LANGUAGE SQL IMMUTABLE STRICT;
-- Map a timestamp to its _rrule.DAY weekday code via to_char(..., 'DY').
CREATE OR REPLACE FUNCTION _rrule.timestamp_to_day("ts" TIMESTAMP) RETURNS _rrule.DAY AS $$
  SELECT CAST(CASE to_char("ts", 'DY')
    WHEN 'MON' THEN 'MO'
    WHEN 'TUE' THEN 'TU'
    WHEN 'WED' THEN 'WE'
    WHEN 'THU' THEN 'TH'
    WHEN 'FRI' THEN 'FR'
    WHEN 'SAT' THEN 'SA'
    WHEN 'SUN' THEN 'SU'
  END as _rrule.DAY);
$$ LANGUAGE SQL IMMUTABLE;
-- Implicit cast so "ts"::_rrule.DAY (used by all_starts/occurrences) works.
CREATE CAST (TIMESTAMP AS _rrule.DAY)
  WITH FUNCTION _rrule.timestamp_to_day(TIMESTAMP)
  AS IMPLICIT;CREATE OR REPLACE FUNCTION _rrule.enum_index_of(anyenum)
RETURNS INTEGER AS $$
    SELECT row_number FROM (
        SELECT (row_number() OVER ())::INTEGER, "value"
        FROM unnest(enum_range($1)) "value"
    ) x
    WHERE "value" = $1;
$$ LANGUAGE SQL IMMUTABLE STRICT;
COMMENT ON FUNCTION _rrule.enum_index_of(anyenum) IS 'Given an ENUM value, return it''s index.';
-- Array-literal coercion helpers: wrapping in '{...}' leverages Postgres
-- array-literal parsing (which also trims whitespace around elements).
CREATE OR REPLACE FUNCTION _rrule.integer_array (TEXT)
RETURNS integer[] AS $$
  SELECT ('{' || $1 || '}')::integer[];
$$ LANGUAGE SQL IMMUTABLE STRICT;
COMMENT ON FUNCTION _rrule.integer_array (text) IS 'Coerce a text string into an array of integers';
CREATE OR REPLACE FUNCTION _rrule.day_array (TEXT)
RETURNS _rrule.DAY[] AS $$
  SELECT ('{' || $1 || '}')::_rrule.DAY[];
$$ LANGUAGE SQL IMMUTABLE STRICT;
COMMENT ON FUNCTION _rrule.day_array (text) IS 'Coerce a text string into an array of "rrule"."day"';
-- Join any array into delimiter-separated text (used by _rrule.text()).
CREATE OR REPLACE FUNCTION _rrule.array_join(ANYARRAY, "delimiter" TEXT)
RETURNS TEXT AS $$
  SELECT string_agg(x::text, "delimiter")
  FROM unnest($1) x;
$$ LANGUAGE SQL IMMUTABLE STRICT;
CREATE OR REPLACE FUNCTION _rrule.explode(_rrule.RRULE)
RETURNS SETOF _rrule.RRULE AS 'SELECT $1' LANGUAGE SQL IMMUTABLE STRICT;
COMMENT ON FUNCTION _rrule.explode (_rrule.RRULE) IS 'Helper function to allow SELECT * FROM explode(rrule)';
-- Row equality via UNION dedup: equal rules collapse to a single row.
CREATE OR REPLACE FUNCTION _rrule.compare_equal(_rrule.RRULE, _rrule.RRULE)
RETURNS BOOLEAN AS $$
  SELECT count(*) = 1 FROM (
    SELECT * FROM _rrule.explode($1) UNION SELECT * FROM _rrule.explode($2)
  ) AS x;
$$ LANGUAGE SQL IMMUTABLE STRICT;
-- Complement of compare_equal: distinct rules survive dedup as two rows.
CREATE OR REPLACE FUNCTION _rrule.compare_not_equal(_rrule.RRULE, _rrule.RRULE)
RETURNS BOOLEAN AS $$
  SELECT count(*) = 2 FROM (
    SELECT * FROM _rrule.explode($1) UNION SELECT * FROM _rrule.explode($2)
  ) AS x;
$$ LANGUAGE SQL IMMUTABLE STRICT;
CREATE OR REPLACE FUNCTION _rrule.build_interval("interval" INTEGER, "freq" _rrule.FREQ)
RETURNS INTERVAL AS $$
  -- Transform ical time interval enums into Postgres intervals, e.g.
  -- "WEEKLY" becomes "WEEKS".
  -- The double regexp handles all four enum values:
  --   YEARLY->YEARS, MONTHLY->MONTHS, WEEKLY->WEEKS, and
  --   DAILY -('LY'->'S')-> DAIS -('IS'->'YS')-> DAYS.
  SELECT ("interval" || ' ' || regexp_replace(regexp_replace("freq"::TEXT, 'LY', 'S'), 'IS', 'YS'))::INTERVAL;
$$ LANGUAGE SQL IMMUTABLE STRICT;
-- Convenience overload taking a whole rule; defaults interval to 1.
CREATE OR REPLACE FUNCTION _rrule.build_interval(_rrule.RRULE)
RETURNS INTERVAL AS $$
  SELECT _rrule.build_interval(COALESCE($1."interval", 1), $1."freq");
$$ LANGUAGE SQL IMMUTABLE STRICT;
-- rrule containment.
-- intervals must be compatible.
-- wkst must match
-- all other fields must have $2's value(s) in $1.
-- NOTE(review): only the interval and wkst checks are implemented here; the
-- BY* field containment described above is not yet enforced — confirm.
CREATE OR REPLACE FUNCTION _rrule.contains(_rrule.RRULE, _rrule.RRULE)
RETURNS BOOLEAN AS $$
  SELECT _rrule.interval_contains(
    _rrule.build_interval($1),
    _rrule.build_interval($2)
  ) AND COALESCE($1."wkst" = $2."wkst", true);
$$ LANGUAGE SQL IMMUTABLE STRICT;
-- Flipped-argument convenience wrapper around _rrule.contains.
CREATE OR REPLACE FUNCTION _rrule.contained_by(_rrule.RRULE, _rrule.RRULE)
RETURNS BOOLEAN AS $$
  SELECT _rrule.contains($2, $1);
$$ LANGUAGE SQL IMMUTABLE STRICT;
-- Effective end of a rule: the earlier of its explicit UNTIL and the end
-- implied by COUNT repetitions of the rule's interval from "dtstart".
-- For a rule with neither COUNT nor UNTIL the second branch yields
-- dtstart + one interval (COALESCE falls back to 1).
CREATE OR REPLACE FUNCTION _rrule.until("rrule" _rrule.RRULE, "dtstart" TIMESTAMP)
RETURNS TIMESTAMP AS $$
  SELECT min("until")
  FROM (
    SELECT "rrule"."until"
    UNION
    SELECT "dtstart" + _rrule.build_interval("rrule"."interval", "rrule"."freq") * COALESCE("rrule"."count", CASE WHEN "rrule"."until" IS NOT NULL THEN NULL ELSE 1 END) AS "until"
  -- GROUP BY () aggregates the two candidate rows into a single min().
  ) "until" GROUP BY ();
$$ LANGUAGE SQL IMMUTABLE STRICT;
COMMENT ON FUNCTION _rrule.until(_rrule.RRULE, TIMESTAMP) IS 'The calculated "until"" timestamp for the given rrule+dtstart';
-- For example, a YEARLY rule that repeats on first and third month have 2 start values.
-- Produce every candidate "seed" timestamp for the rule within roughly the
-- first cycle after "dtstart"; occurrences() then extends each seed by the
-- rule's interval. Candidates come from the cross product of the BY* parts
-- (A10) plus day/month scans near dtstart (A11), then get filtered so every
-- provided BY* constraint holds.
CREATE OR REPLACE FUNCTION _rrule.all_starts(
  "rrule" _rrule.RRULE,
  "dtstart" TIMESTAMP
) RETURNS SETOF TIMESTAMP AS $$
DECLARE
  months int[];
  hour int := EXTRACT(HOUR FROM "dtstart")::integer;
  minute int := EXTRACT(MINUTE FROM "dtstart")::integer;
  second double precision := EXTRACT(SECOND FROM "dtstart");
  day int := EXTRACT(DAY FROM "dtstart")::integer;
  month int := EXTRACT(MONTH FROM "dtstart")::integer;
  year int := EXTRACT(YEAR FROM "dtstart")::integer;
  year_start timestamp := make_timestamp(year, 1, 1, hour, minute, second);
  year_end timestamp := make_timestamp(year, 12, 31, hour, minute, second);
  interv INTERVAL := _rrule.build_interval("rrule");
BEGIN
  RETURN QUERY WITH
  "year" as (SELECT EXTRACT(YEAR FROM "dtstart")::integer AS "year"),
  -- A10: one candidate per combination of the BY* arrays, with dtstart's own
  -- fields filling in any part the rule does not constrain.
  A10 as (
    SELECT
      make_timestamp(
        "year"."year",
        COALESCE("bymonth", month),
        COALESCE("bymonthday", day),
        COALESCE("byhour", hour),
        COALESCE("byminute", minute),
        COALESCE("bysecond", second)
      ) as "ts"
    FROM "year"
    LEFT OUTER JOIN unnest(("rrule")."bymonth") AS "bymonth" ON (true)
    LEFT OUTER JOIN unnest(("rrule")."bymonthday") as "bymonthday" ON (true)
    LEFT OUTER JOIN unnest(("rrule")."byhour") AS "byhour" ON (true)
    LEFT OUTER JOIN unnest(("rrule")."byminute") AS "byminute" ON (true)
    LEFT OUTER JOIN unnest(("rrule")."bysecond") AS "bysecond" ON (true)
  ),
  -- A11: add day-granularity scans for BYDAY (next 7 days), BYMONTHDAY
  -- (next 2 months) and BYMONTH (monthly steps over one year).
  A11 as (
    SELECT DISTINCT "ts"
    FROM A10
    UNION
    SELECT "ts" FROM (
      SELECT "ts"
      FROM generate_series("dtstart", year_end, INTERVAL '1 day') "ts"
      WHERE (
        "ts"::_rrule.DAY = ANY("rrule"."byday")
      )
      AND "ts" <= ("dtstart" + INTERVAL '7 days')
    ) as "ts"
    UNION
    SELECT "ts" FROM (
      SELECT "ts"
      FROM generate_series("dtstart", year_end, INTERVAL '1 day') "ts"
      WHERE (
        EXTRACT(DAY FROM "ts") = ANY("rrule"."bymonthday")
      )
      AND "ts" <= ("dtstart" + INTERVAL '2 months')
    ) as "ts"
    UNION
    SELECT "ts" FROM (
      SELECT "ts"
      FROM generate_series("dtstart", "dtstart" + INTERVAL '1 year', INTERVAL '1 month') "ts"
      WHERE (
        EXTRACT(MONTH FROM "ts") = ANY("rrule"."bymonth")
      )
    ) as "ts"
  )
  -- Keep only candidates satisfying every BY* constraint the rule provides.
  SELECT DISTINCT "ts"
  FROM A11
  WHERE (
    "rrule"."byday" IS NULL OR "ts"::_rrule.DAY = ANY("rrule"."byday")
  )
  AND (
    "rrule"."bymonth" IS NULL OR EXTRACT(MONTH FROM "ts") = ANY("rrule"."bymonth")
  )
  AND (
    "rrule"."bymonthday" IS NULL OR EXTRACT(DAY FROM "ts") = ANY("rrule"."bymonthday")
  )
  ORDER BY "ts";
END;
$$ LANGUAGE plpgsql STRICT IMMUTABLE;
-- Raise a descriptive exception if the parsed rule violates any of the
-- RFC 5545 structural constraints this implementation enforces.
CREATE OR REPLACE FUNCTION _rrule.validate_rrule (result _rrule.RRULE)
RETURNS void AS $$
BEGIN
  -- FREQ is required
  IF result."freq" IS NULL THEN
    RAISE EXCEPTION 'FREQ cannot be null';
  END IF;
  -- FREQ=YEARLY required if BYWEEKNO is provided
  IF result."byweekno" IS NOT NULL AND result."freq" != 'YEARLY' THEN
    RAISE EXCEPTION 'FREQ must be YEARLY if BYWEEKNO is provided.';
  END IF;
  -- Limits on FREQ if byyearday is selected
  IF (result."freq" <> 'YEARLY' AND result."byyearday" IS NOT NULL) THEN
    RAISE EXCEPTION 'BYYEARDAY is only valid when FREQ is YEARLY.';
  END IF;
  IF (result."freq" = 'WEEKLY' AND result."bymonthday" IS NOT NULL) THEN
    RAISE EXCEPTION 'BYMONTHDAY is not valid when FREQ is WEEKLY.';
  END IF;
  -- BY[something-else] is required if BYSETPOS is set.
  IF (result."bysetpos" IS NOT NULL AND result."bymonth" IS NULL AND result."byweekno" IS NULL AND result."byyearday" IS NULL AND result."bymonthday" IS NULL AND result."byday" IS NULL AND result."byhour" IS NULL AND result."byminute" IS NULL AND result."bysecond" IS NULL) THEN
    RAISE EXCEPTION 'BYSETPOS requires at least one other BY*';
  END IF;
  IF result."freq" = 'DAILY' AND result."byday" IS NOT NULL THEN
    RAISE EXCEPTION 'BYDAY is not valid when FREQ is DAILY.';
  END IF;
  IF result."until" IS NOT NULL AND result."count" IS NOT NULL THEN
    RAISE EXCEPTION 'UNTIL and COUNT MUST NOT occur in the same recurrence.';
  END IF;
  IF result."interval" IS NOT NULL THEN
    IF (NOT result."interval" > 0) THEN
      RAISE EXCEPTION 'INTERVAL must be a non-zero integer.';
    END IF;
  END IF;
-- rrule(TEXT): parse an 'RRULE:KEY=VAL;...' string into an _rrule.RRULE,
-- applying defaults (INTERVAL=1, WKST=MO) and validating before returning.
END;
$$ LANGUAGE plpgsql IMMUTABLE STRICT;CREATE OR REPLACE FUNCTION _rrule.rrule (TEXT)
RETURNS _rrule.RRULE AS $$
DECLARE
  result _rrule.RRULE;
BEGIN
  WITH "tokens" AS (
    WITH A20 as (SELECT _rrule.parse_line($1::text, 'RRULE') "r"),
    -- Split each key value pair into an array, e.g. {'FREQ', 'DAILY'}
    A30 as (SELECT regexp_split_to_array("r", '=') AS "y" FROM A20)
    SELECT "y"[1] AS "key", "y"[2] AS "val" FROM A30
  ),
  candidate AS (
    SELECT
      (SELECT "val"::_rrule.FREQ FROM "tokens" WHERE "key" = 'FREQ') AS "freq",
      (SELECT "val"::INTEGER FROM "tokens" WHERE "key" = 'INTERVAL') AS "interval",
      (SELECT "val"::INTEGER FROM "tokens" WHERE "key" = 'COUNT') AS "count",
      (SELECT "val"::TIMESTAMP FROM "tokens" WHERE "key" = 'UNTIL') AS "until",
      (SELECT _rrule.integer_array("val") FROM "tokens" WHERE "key" = 'BYSECOND') AS "bysecond",
      (SELECT _rrule.integer_array("val") FROM "tokens" WHERE "key" = 'BYMINUTE') AS "byminute",
      (SELECT _rrule.integer_array("val") FROM "tokens" WHERE "key" = 'BYHOUR') AS "byhour",
      (SELECT _rrule.day_array("val") FROM "tokens" WHERE "key" = 'BYDAY') AS "byday",
      (SELECT _rrule.integer_array("val") FROM "tokens" WHERE "key" = 'BYMONTHDAY') AS "bymonthday",
      (SELECT _rrule.integer_array("val") FROM "tokens" WHERE "key" = 'BYYEARDAY') AS "byyearday",
      (SELECT _rrule.integer_array("val") FROM "tokens" WHERE "key" = 'BYWEEKNO') AS "byweekno",
      (SELECT _rrule.integer_array("val") FROM "tokens" WHERE "key" = 'BYMONTH') AS "bymonth",
      (SELECT _rrule.integer_array("val") FROM "tokens" WHERE "key" = 'BYSETPOS') AS "bysetpos",
      (SELECT "val"::_rrule.DAY FROM "tokens" WHERE "key" = 'WKST') AS "wkst"
  )
  SELECT
    "freq",
    -- Default value for INTERVAL
    COALESCE("interval", 1) AS "interval",
    "count",
    "until",
    "bysecond",
    "byminute",
    "byhour",
    "byday",
    "bymonthday",
    "byyearday",
    "byweekno",
    "bymonth",
    "bysetpos",
    -- DEFAULT value for wkst
    COALESCE("wkst", 'MO') AS "wkst"
  INTO result
  FROM candidate;
  PERFORM _rrule.validate_rrule(result);
  RETURN result;
END;
$$ LANGUAGE plpgsql IMMUTABLE STRICT;
-- Serialize a rule back to its 'RRULE:...' text form. Defaulted values
-- (INTERVAL=1, WKST=MO) are omitted; COALESCE(...,'') drops NULL parts; the
-- final regexp strips the trailing ';'.
CREATE OR REPLACE FUNCTION _rrule.text(_rrule.RRULE)
RETURNS TEXT AS $$
  SELECT regexp_replace(
    'RRULE:'
    || COALESCE('FREQ=' || $1."freq" || ';', '')
    || CASE WHEN $1."interval" = 1 THEN '' ELSE COALESCE('INTERVAL=' || $1."interval" || ';', '') END
    || COALESCE('COUNT=' || $1."count" || ';', '')
    || COALESCE('UNTIL=' || $1."until" || ';', '')
    || COALESCE('BYSECOND=' || _rrule.array_join($1."bysecond", ',') || ';', '')
    || COALESCE('BYMINUTE=' || _rrule.array_join($1."byminute", ',') || ';', '')
    || COALESCE('BYHOUR=' || _rrule.array_join($1."byhour", ',') || ';', '')
    || COALESCE('BYDAY=' || _rrule.array_join($1."byday", ',') || ';', '')
    || COALESCE('BYMONTHDAY=' || _rrule.array_join($1."bymonthday", ',') || ';', '')
    || COALESCE('BYYEARDAY=' || _rrule.array_join($1."byyearday", ',') || ';', '')
    || COALESCE('BYWEEKNO=' || _rrule.array_join($1."byweekno", ',') || ';', '')
    || COALESCE('BYMONTH=' || _rrule.array_join($1."bymonth", ',') || ';', '')
    || COALESCE('BYSETPOS=' || _rrule.array_join($1."bysetpos", ',') || ';', '')
    || CASE WHEN $1."wkst" = 'MO' THEN '' ELSE COALESCE('WKST=' || $1."wkst" || ';', '') END
  , ';$', '');
$$ LANGUAGE SQL IMMUTABLE STRICT;
-- Build an RRULESET from raw iCalendar text: DTSTART/DTEND/EXRULE lines are
-- parsed individually; RDATE/EXDATE are not parsed here (left NULL).
CREATE OR REPLACE FUNCTION _rrule.rruleset (TEXT)
RETURNS _rrule.RRULESET AS $$
  WITH "dtstart-line" AS (SELECT _rrule.parse_line($1::text, 'DTSTART') as "x"),
  "dtend-line" AS (SELECT _rrule.parse_line($1::text, 'DTEND') as "x"),
  "exrule-line" AS (SELECT _rrule.parse_line($1::text, 'EXRULE') as "x")
  SELECT
    (SELECT "x"::timestamp FROM "dtstart-line" LIMIT 1) AS "dtstart",
    (SELECT "x"::timestamp FROM "dtend-line" LIMIT 1) AS "dtend",
    (SELECT _rrule.rrule($1::text) "rrule") as "rrule",
    (SELECT _rrule.rrule("x"::text) "rrule" FROM "exrule-line") as "exrule",
    NULL::TIMESTAMP[] "rdate",
    NULL::TIMESTAMP[] "exdate";
$$ LANGUAGE SQL IMMUTABLE STRICT;
-- All of the function(rrule, ...) forms also accept a text argument, which will
-- be parsed using the RFC-compliant parser.
-- A rule is finite when it has a COUNT or an UNTIL bound.
CREATE OR REPLACE FUNCTION _rrule.is_finite("rrule" _rrule.RRULE)
RETURNS BOOLEAN AS $$
  SELECT "rrule"."count" IS NOT NULL OR "rrule"."until" IS NOT NULL;
$$ LANGUAGE SQL STRICT IMMUTABLE;
-- Text overload: parse first, then delegate.
CREATE OR REPLACE FUNCTION _rrule.is_finite("rrule" TEXT)
RETURNS BOOLEAN AS $$
  SELECT _rrule.is_finite(_rrule.rrule("rrule"));
$$ LANGUAGE SQL STRICT IMMUTABLE;
-- A ruleset's finiteness is its rule's finiteness (exrule cannot extend it).
CREATE OR REPLACE FUNCTION _rrule.is_finite("rruleset" _rrule.RRULESET)
RETURNS BOOLEAN AS $$
  SELECT _rrule.is_finite("rruleset"."rrule")
$$ LANGUAGE SQL STRICT IMMUTABLE;
-- An array of rulesets is finite if ANY member ruleset is finite.
-- (Set-based EXISTS; same result as iterating and returning at the first
-- finite member — an empty array yields false.)
CREATE OR REPLACE FUNCTION _rrule.is_finite("rruleset_array" _rrule.RRULESET[])
RETURNS BOOLEAN AS $$
  SELECT EXISTS (
    SELECT 1
    FROM unnest("rruleset_array") AS "member"
    WHERE _rrule.is_finite("member")
  );
$$ LANGUAGE SQL STRICT IMMUTABLE;
-- Expand a rule from "dtstart": every seed from all_starts is extended by the
-- rule's interval up to its effective "until", then results are deduplicated,
-- clipped to >= dtstart, and truncated at COUNT occurrences.
CREATE OR REPLACE FUNCTION _rrule.occurrences(
  "rrule" _rrule.RRULE,
  "dtstart" TIMESTAMP
)
RETURNS SETOF TIMESTAMP AS $$
  WITH "starts" AS (
    SELECT "start"
    FROM _rrule.all_starts($1, $2) "start"
  ),
  "params" AS (
    SELECT
      "until",
      "interval"
    FROM _rrule.until($1, $2) "until"
    FULL OUTER JOIN _rrule.build_interval($1) "interval" ON (true)
  ),
  "generated" AS (
    SELECT generate_series("start", "until", "interval") "occurrence"
    FROM "params"
    FULL OUTER JOIN "starts" ON (true)
  ),
  "ordered" AS (
    SELECT DISTINCT "occurrence"
    FROM "generated"
    WHERE "occurrence" >= "dtstart"
    ORDER BY "occurrence"
  ),
  -- Number the ordered occurrences so COUNT can cap them.
  "tagged" AS (
    SELECT
      row_number() OVER (),
      "occurrence"
    FROM "ordered"
  )
  SELECT "occurrence"
  FROM "tagged"
  WHERE "row_number" <= "rrule"."count"
  OR "rrule"."count" IS NULL
  ORDER BY "occurrence";
$$ LANGUAGE SQL STRICT IMMUTABLE;
-- Range-filtered overload: occurrences of the rule within "between".
CREATE OR REPLACE FUNCTION _rrule.occurrences("rrule" _rrule.RRULE, "dtstart" TIMESTAMP, "between" TSRANGE)
RETURNS SETOF TIMESTAMP AS $$
  SELECT "occurrence"
  FROM _rrule.occurrences("rrule", "dtstart") "occurrence"
  WHERE "occurrence" <@ "between";
$$ LANGUAGE SQL STRICT IMMUTABLE;
-- Text overload: parse the rule, then filter by range.
CREATE OR REPLACE FUNCTION _rrule.occurrences("rrule" TEXT, "dtstart" TIMESTAMP, "between" TSRANGE)
RETURNS SETOF TIMESTAMP AS $$
  SELECT "occurrence"
  FROM _rrule.occurrences(_rrule.rrule("rrule"), "dtstart") "occurrence"
  WHERE "occurrence" <@ "between";
$$ LANGUAGE SQL STRICT IMMUTABLE;
-- Occurrences of a full ruleset within "tsrange":
-- (rrule expansion UNION rdate) EXCEPT (exrule expansion UNION exdate).
CREATE OR REPLACE FUNCTION _rrule.occurrences(
  "rruleset" _rrule.RRULESET,
  "tsrange" TSRANGE
)
RETURNS SETOF TIMESTAMP AS $$
  WITH "rrules" AS (
    SELECT
      "rruleset"."dtstart",
      "rruleset"."dtend",
      "rruleset"."rrule"
  ),
  "rdates" AS (
    SELECT _rrule.occurrences("rrule", "dtstart", "tsrange") AS "occurrence"
    FROM "rrules"
    UNION
    SELECT unnest("rruleset"."rdate") AS "occurrence"
  ),
  "exrules" AS (
    SELECT
      "rruleset"."dtstart",
      "rruleset"."dtend",
      "rruleset"."exrule"
  ),
  "exdates" AS (
    SELECT _rrule.occurrences("exrule", "dtstart", "tsrange") AS "occurrence"
    FROM "exrules"
    UNION
    SELECT unnest("rruleset"."exdate") AS "occurrence"
  )
  SELECT "occurrence" FROM "rdates"
  EXCEPT
  SELECT "occurrence" FROM "exdates"
  ORDER BY "occurrence";
$$ LANGUAGE SQL STRICT IMMUTABLE;
-- Unbounded overload: all occurrences of the ruleset.
CREATE OR REPLACE FUNCTION _rrule.occurrences("rruleset" _rrule.RRULESET)
RETURNS SETOF TIMESTAMP AS $$
  SELECT _rrule.occurrences("rruleset", '(,)'::TSRANGE);
$$ LANGUAGE SQL STRICT IMMUTABLE;
-- Occurrences of every ruleset in the array within "tsrange", de-duplicated
-- and sorted ascending.
-- Fix: the previous implementation built dynamic SQL by concatenating each
-- composite RRULESET value into a quoted literal — fragile (any embedded
-- quote in the value breaks the statement) and needlessly procedural. A
-- set-based LATERAL expansion produces the same rows: per-element expansion,
-- UNION-style dedup (DISTINCT), empty result for an empty array, NULL for a
-- NULL array (STRICT).
CREATE OR REPLACE FUNCTION _rrule.occurrences(
  "rruleset_array" _rrule.RRULESET[],
  "tsrange" TSRANGE
  -- TODO: add a default limit and then use that limit from `first` and `last`
)
RETURNS SETOF TIMESTAMP AS $$
  SELECT DISTINCT "occurrence"
  FROM unnest("rruleset_array") AS "item",
       LATERAL _rrule.occurrences("item", "tsrange") AS "occurrence"
  ORDER BY "occurrence" ASC;
$$ LANGUAGE SQL STRICT IMMUTABLE;
-- Earliest candidate start of the rule at or after "dtstart".
CREATE OR REPLACE FUNCTION _rrule.first("rrule" _rrule.RRULE, "dtstart" TIMESTAMP)
RETURNS TIMESTAMP AS $$
  SELECT "ts"
  FROM _rrule.all_starts("rrule", "dtstart") "ts"
  WHERE "ts" >= "dtstart"
  ORDER BY "ts" ASC
  LIMIT 1;
$$ LANGUAGE SQL STRICT IMMUTABLE;
-- Text overload: parse the rule, then delegate.
CREATE OR REPLACE FUNCTION _rrule.first("rrule" TEXT, "dtstart" TIMESTAMP)
RETURNS TIMESTAMP AS $$
  SELECT _rrule.first(_rrule.rrule("rrule"), "dtstart");
$$ LANGUAGE SQL STRICT IMMUTABLE;
-- Earliest occurrence of a ruleset.
CREATE OR REPLACE FUNCTION _rrule.first("rruleset" _rrule.RRULESET)
RETURNS TIMESTAMP AS $$
  SELECT occurrence
  FROM _rrule.occurrences("rruleset") occurrence
  ORDER BY occurrence ASC LIMIT 1;
$$ LANGUAGE SQL STRICT IMMUTABLE;
-- Earliest occurrence across an array of rulesets.
CREATE OR REPLACE FUNCTION _rrule.first("rruleset_array" _rrule.RRULESET[])
RETURNS TIMESTAMP AS $$
  SELECT occurrence
  FROM _rrule.occurrences("rruleset_array", '(,)'::TSRANGE) occurrence
  ORDER BY occurrence ASC LIMIT 1;
$$ LANGUAGE SQL STRICT IMMUTABLE;
-- Latest occurrence of the rule (unbounded scan — callers should check
-- is_finite() first for rules without COUNT/UNTIL).
CREATE OR REPLACE FUNCTION _rrule.last("rrule" _rrule.RRULE, "dtstart" TIMESTAMP)
RETURNS TIMESTAMP AS $$
  SELECT occurrence
  FROM _rrule.occurrences("rrule", "dtstart") occurrence
  ORDER BY occurrence DESC LIMIT 1;
$$ LANGUAGE SQL STRICT IMMUTABLE;
-- Text overload: parse the rule, then delegate.
CREATE OR REPLACE FUNCTION _rrule.last("rrule" TEXT, "dtstart" TIMESTAMP)
RETURNS TIMESTAMP AS $$
  SELECT _rrule.last(_rrule.rrule("rrule"), "dtstart");
$$ LANGUAGE SQL STRICT IMMUTABLE;
-- Latest occurrence of a ruleset.
CREATE OR REPLACE FUNCTION _rrule.last("rruleset" _rrule.RRULESET)
RETURNS TIMESTAMP AS $$
  SELECT occurrence
  FROM _rrule.occurrences("rruleset") occurrence
  ORDER BY occurrence DESC LIMIT 1;
$$ LANGUAGE SQL STRICT IMMUTABLE;
-- TODO: Ensure to check whether the range is finite. If not, we should return null
-- or something meaningful.
-- Latest occurrence across an array of rulesets; yields a single NULL row
-- when the array is not finite (an infinite set has no last occurrence).
CREATE OR REPLACE FUNCTION _rrule.last("rruleset_array" _rrule.RRULESET[])
RETURNS SETOF TIMESTAMP AS $$
BEGIN
  IF (SELECT _rrule.is_finite("rruleset_array")) THEN
    RETURN QUERY SELECT occurrence
    FROM _rrule.occurrences("rruleset_array", '(,)'::TSRANGE) occurrence
    ORDER BY occurrence DESC LIMIT 1;
  ELSE
    RETURN QUERY SELECT NULL::TIMESTAMP;
  END IF;
END;
$$ LANGUAGE plpgsql STRICT IMMUTABLE;
-- Occurrences at or before "when" (inclusive upper bound via '[]' tsrange).
CREATE OR REPLACE FUNCTION _rrule.before(
  "rrule" _rrule.RRULE,
  "dtstart" TIMESTAMP,
  "when" TIMESTAMP
)
RETURNS SETOF TIMESTAMP AS $$
  SELECT *
  FROM _rrule.occurrences("rrule", "dtstart", tsrange(NULL, "when", '[]'));
$$ LANGUAGE SQL STRICT IMMUTABLE;
-- Text overload.
CREATE OR REPLACE FUNCTION _rrule.before("rrule" TEXT, "dtstart" TIMESTAMP, "when" TIMESTAMP)
RETURNS SETOF TIMESTAMP AS $$
  SELECT _rrule.before(_rrule.rrule("rrule"), "dtstart", "when");
$$ LANGUAGE SQL STRICT IMMUTABLE;
-- Ruleset overload.
CREATE OR REPLACE FUNCTION _rrule.before("rruleset" _rrule.RRULESET, "when" TIMESTAMP)
RETURNS SETOF TIMESTAMP AS $$
  SELECT *
  FROM _rrule.occurrences("rruleset", tsrange(NULL, "when", '[]'));
$$ LANGUAGE SQL STRICT IMMUTABLE;
-- TODO: test
CREATE OR REPLACE FUNCTION _rrule.before("rruleset_array" _rrule.RRULESET[], "when" TIMESTAMP)
RETURNS SETOF TIMESTAMP AS $$
  SELECT *
  FROM _rrule.occurrences("rruleset_array", tsrange(NULL, "when", '[]'));
$$ LANGUAGE SQL STRICT IMMUTABLE;
-- Occurrences after "when" (default '[)' tsrange: lower bound inclusive).
CREATE OR REPLACE FUNCTION _rrule.after(
  "rrule" _rrule.RRULE,
  "dtstart" TIMESTAMP,
  "when" TIMESTAMP
)
RETURNS SETOF TIMESTAMP AS $$
  SELECT *
  FROM _rrule.occurrences("rrule", "dtstart", tsrange("when", NULL));
$$ LANGUAGE SQL STRICT IMMUTABLE;
-- Text overload.
CREATE OR REPLACE FUNCTION _rrule.after(
  "rrule" TEXT,
  "dtstart" TIMESTAMP,
  "when" TIMESTAMP
)
RETURNS SETOF TIMESTAMP AS $$
  SELECT _rrule.after(_rrule.rrule("rrule"), "dtstart", "when");
$$ LANGUAGE SQL STRICT IMMUTABLE;
-- Ruleset overload.
CREATE OR REPLACE FUNCTION _rrule.after("rruleset" _rrule.RRULESET, "when" TIMESTAMP)
RETURNS SETOF TIMESTAMP AS $$
  SELECT *
  FROM _rrule.occurrences("rruleset", tsrange("when", NULL));
$$ LANGUAGE SQL STRICT IMMUTABLE;
-- TODO: test
CREATE OR REPLACE FUNCTION _rrule.after("rruleset_array" _rrule.RRULESET[], "when" TIMESTAMP)
RETURNS SETOF TIMESTAMP AS $$
  SELECT *
  FROM _rrule.occurrences("rruleset_array", tsrange("when", NULL));
$$ LANGUAGE SQL STRICT IMMUTABLE;
-- TRUE when the ruleset produces an occurrence on the same calendar DATE as
-- $2. The scan starts one month before $2 and compares at date granularity
-- (so time-of-day is ignored) — see the inline TODO.
CREATE OR REPLACE FUNCTION _rrule.contains_timestamp(_rrule.RRULESET, TIMESTAMP)
RETURNS BOOLEAN AS $$
DECLARE
  inSet boolean;
BEGIN
  -- TODO: Not sure what how this is finding a timestamp that is contained
  -- by the rruleset.
  SELECT COUNT(*) > 0
  INTO inSet
  FROM _rrule.after($1, $2 - INTERVAL '1 month') "ts"
  WHERE "ts"::date = $2::date;
  RETURN inSet;
END;
$$ LANGUAGE plpgsql IMMUTABLE STRICT;
-- Build an RRULE from a jsonb object, applying the same defaults and
-- validation as the text parser. Returns NULL for an empty/null object.
CREATE OR REPLACE FUNCTION _rrule.jsonb_to_rrule("input" jsonb)
RETURNS _rrule.RRULE AS $$
DECLARE
  result _rrule.RRULE;
BEGIN
  -- Empty object (or jsonb null): nothing to build.
  IF (SELECT count(*) = 0 FROM jsonb_object_keys("input") WHERE "input"::TEXT <> 'null') THEN
    RETURN NULL;
  END IF;
  SELECT
    "freq",
    -- Default value for INTERVAL
    COALESCE("interval", 1) AS "interval",
    "count",
    "until",
    "bysecond",
    "byminute",
    "byhour",
    "byday",
    "bymonthday",
    "byyearday",
    "byweekno",
    "bymonth",
    "bysetpos",
    -- DEFAULT value for wkst
    COALESCE("wkst", 'MO') AS "wkst"
  INTO result
  FROM jsonb_to_record("input") as x(
    "freq" _rrule.FREQ,
    "interval" integer,
    "count" INTEGER,
    "until" text,
    "bysecond" integer[],
    "byminute" integer[],
    "byhour" integer[],
    "byday" text[],
    "bymonthday" integer[],
    "byyearday" integer[],
    "byweekno" integer[],
    "bymonth" integer[],
    "bysetpos" integer[],
    "wkst" _rrule.DAY
  );
  PERFORM _rrule.validate_rrule(result);
  RETURN result;
END;
$$ LANGUAGE plpgsql IMMUTABLE STRICT;
-- Converts a jsonb object into a _rrule.RRULESET composite value.
-- Nested "rrule"/"exrule" objects are converted recursively via
-- jsonb_to_rrule; dtstart/dtend/rdate/exdate are cast from text.
-- Column order must match the _rrule.RRULESET field order (positional INTO).
CREATE OR REPLACE FUNCTION _rrule.jsonb_to_rruleset("input" jsonb)
RETURNS _rrule.RRULESET AS $$
DECLARE
result _rrule.RRULESET;
BEGIN
SELECT
"dtstart"::TIMESTAMP,
"dtend"::TIMESTAMP,
_rrule.jsonb_to_rrule("rrule") "rrule",
_rrule.jsonb_to_rrule("exrule") "exrule",
"rdate"::TIMESTAMP[],
"exdate"::TIMESTAMP[]
INTO result
FROM jsonb_to_record("input") as x(
"dtstart" text,
"dtend" text,
"rrule" jsonb,
"exrule" jsonb,
"rdate" text[],
"exdate" text[]
);
-- TODO: validate rruleset
RETURN result;
END;
$$ LANGUAGE plpgsql IMMUTABLE STRICT;
-- Converts a jsonb array into an array of _rrule.RRULESET values,
-- converting each element with jsonb_to_rruleset in input order.
CREATE OR REPLACE FUNCTION _rrule.jsonb_to_rruleset_array("input" jsonb)
RETURNS _rrule.RRULESET[] AS $$
DECLARE
elem jsonb;
result _rrule.RRULESET[] := '{}'::_rrule.RRULESET[];
BEGIN
FOR elem IN SELECT * FROM jsonb_array_elements("input")
LOOP
result := result || _rrule.jsonb_to_rruleset(elem);
END LOOP;
RETURN result;
END;
$$ LANGUAGE plpgsql IMMUTABLE STRICT;
-- Serialises a _rrule.RRULE composite to a jsonb object.
-- jsonb_strip_nulls removes every unset field, so only populated rule parts
-- appear in the output.
CREATE OR REPLACE FUNCTION _rrule.rrule_to_jsonb("input" _rrule.RRULE)
RETURNS jsonb AS $$
BEGIN
RETURN jsonb_strip_nulls(jsonb_build_object(
'freq', "input"."freq",
'interval', "input"."interval",
'count', "input"."count",
'until', "input"."until",
'bysecond', "input"."bysecond",
'byminute', "input"."byminute",
'byhour', "input"."byhour",
'byday', "input"."byday",
'bymonthday', "input"."bymonthday",
'byyearday', "input"."byyearday",
'byweekno', "input"."byweekno",
'bymonth', "input"."bymonth",
'bysetpos', "input"."bysetpos",
'wkst', "input"."wkst"
));
END;
$$ LANGUAGE plpgsql IMMUTABLE STRICT;
-- Serialises a _rrule.RRULESET composite to a jsonb object.
-- The embedded rule and exception rule are serialised first, then the full
-- object is assembled; jsonb_strip_nulls drops absent fields.
CREATE OR REPLACE FUNCTION _rrule.rruleset_to_jsonb("input" _rrule.RRULESET)
RETURNS jsonb AS $$
DECLARE
rrule jsonb;
exrule jsonb;
BEGIN
rrule := _rrule.rrule_to_jsonb("input"."rrule");
exrule := _rrule.rrule_to_jsonb("input"."exrule");
RETURN jsonb_strip_nulls(jsonb_build_object(
'dtstart', "input"."dtstart",
'dtend', "input"."dtend",
'rrule', rrule,
'exrule', exrule,
'rdate', "input"."rdate",
'exdate', "input"."exdate"
));
END;
$$ LANGUAGE plpgsql IMMUTABLE STRICT;
-- Serialises an array of RRULESETs to a jsonb array, preserving order.
CREATE OR REPLACE FUNCTION _rrule.rruleset_array_to_jsonb("input" _rrule.RRULESET[])
RETURNS jsonb AS $$
DECLARE
entry _rrule.RRULESET;
agg jsonb := '[]'::jsonb;
BEGIN
-- Appending a jsonb object to a jsonb array with || keeps it an array.
FOREACH entry IN ARRAY "input" LOOP
agg := agg || _rrule.rruleset_to_jsonb(entry);
END LOOP;
RETURN agg;
END;
$$ LANGUAGE plpgsql IMMUTABLE STRICT;
-- True when at least one ruleset in the array ($1) contains the
-- timestamp ($2); returns at the first matching ruleset.
CREATE OR REPLACE FUNCTION _rrule.rruleset_array_contains_timestamp(_rrule.RRULESET[], TIMESTAMP)
RETURNS BOOLEAN AS $$
DECLARE
candidate _rrule.RRULESET;
BEGIN
FOREACH candidate IN ARRAY $1
LOOP
IF _rrule.contains_timestamp(candidate, $2) THEN
RETURN true;
END IF;
END LOOP;
RETURN false;
END;
$$ LANGUAGE plpgsql IMMUTABLE STRICT;
-- True when the ruleset ($1) yields at least one occurrence at/after $2.
-- EXISTS stops at the first occurrence; the previous COUNT(*) > 0 forced the
-- whole occurrence set to be materialized, and its LIMIT 1 was a no-op
-- because an aggregate query already returns exactly one row.
CREATE OR REPLACE FUNCTION _rrule.rruleset_has_after_timestamp(_rrule.RRULESET, TIMESTAMP)
RETURNS BOOLEAN AS $$
SELECT EXISTS (SELECT 1 FROM _rrule.after($1, $2));
$$ LANGUAGE SQL IMMUTABLE STRICT;
-- True when the ruleset ($1) yields at least one occurrence at/before $2.
-- EXISTS stops at the first occurrence; the previous COUNT(*) > 0 forced the
-- whole occurrence set to be materialized, and its LIMIT 1 was a no-op
-- because an aggregate query already returns exactly one row.
CREATE OR REPLACE FUNCTION _rrule.rruleset_has_before_timestamp(_rrule.RRULESET, TIMESTAMP)
RETURNS BOOLEAN AS $$
SELECT EXISTS (SELECT 1 FROM _rrule.before($1, $2));
$$ LANGUAGE SQL IMMUTABLE STRICT;
-- True when any ruleset in the array ($1) yields an occurrence at/after $2.
-- Short-circuits at the first qualifying ruleset; inside the loop, EXISTS
-- replaces the old "count(*) > 0 ... LIMIT 1" which materialized every
-- occurrence (the LIMIT was a no-op on an aggregate query).
CREATE OR REPLACE FUNCTION _rrule.rruleset_array_has_after_timestamp(_rrule.RRULESET[], TIMESTAMP)
RETURNS BOOLEAN AS $$
DECLARE
item _rrule.RRULESET;
BEGIN
FOREACH item IN ARRAY $1
LOOP
IF EXISTS (SELECT 1 FROM _rrule.after(item, $2)) THEN
RETURN true;
END IF;
END LOOP;
RETURN false;
END;
$$ LANGUAGE plpgsql IMMUTABLE STRICT;
-- True when any ruleset in the array ($1) yields an occurrence at/before $2.
-- Short-circuits at the first qualifying ruleset; inside the loop, EXISTS
-- replaces the old "count(*) > 0 ... LIMIT 1" which materialized every
-- occurrence (the LIMIT was a no-op on an aggregate query).
CREATE OR REPLACE FUNCTION _rrule.rruleset_array_has_before_timestamp(_rrule.RRULESET[], TIMESTAMP)
RETURNS BOOLEAN AS $$
DECLARE
item _rrule.RRULESET;
BEGIN
FOREACH item IN ARRAY $1
LOOP
IF EXISTS (SELECT 1 FROM _rrule.before(item, $2)) THEN
RETURN true;
END IF;
END LOOP;
RETURN false;
END;
$$ LANGUAGE plpgsql IMMUTABLE STRICT;
-- Operator sugar over the functions above.
-- Field-by-field equality of two RRULE values.
CREATE OPERATOR = (
LEFTARG = _rrule.RRULE,
RIGHTARG = _rrule.RRULE,
PROCEDURE = _rrule.compare_equal,
NEGATOR = <>,
COMMUTATOR = =
);
-- Inequality of two RRULE values.
CREATE OPERATOR <> (
LEFTARG = _rrule.RRULE,
RIGHTARG = _rrule.RRULE,
PROCEDURE = _rrule.compare_not_equal,
NEGATOR = =,
COMMUTATOR = <>
);
-- rrule @> rrule: left rule's occurrences are a superset of the right's.
CREATE OPERATOR @> (
LEFTARG = _rrule.RRULE,
RIGHTARG = _rrule.RRULE,
PROCEDURE = _rrule.contains,
COMMUTATOR = <@
);
-- rrule <@ rrule: left rule's occurrences are contained in the right's.
CREATE OPERATOR <@ (
LEFTARG = _rrule.RRULE,
RIGHTARG = _rrule.RRULE,
PROCEDURE = _rrule.contained_by,
COMMUTATOR = @>
);
-- rruleset @> timestamp: the set has an occurrence on that day.
CREATE OPERATOR @> (
LEFTARG = _rrule.RRULESET,
RIGHTARG = TIMESTAMP,
PROCEDURE = _rrule.contains_timestamp
);
-- rruleset[] @> timestamp: any set in the array has an occurrence.
CREATE OPERATOR @> (
LEFTARG = _rrule.RRULESET[],
RIGHTARG = TIMESTAMP,
PROCEDURE = _rrule.rruleset_array_contains_timestamp
);
-- rruleset[] > timestamp: any set still has occurrences after the timestamp.
CREATE OPERATOR > (
LEFTARG = _rrule.RRULESET[],
RIGHTARG = TIMESTAMP,
PROCEDURE = _rrule.rruleset_array_has_after_timestamp
);
-- rruleset[] < timestamp: any set had occurrences before the timestamp.
CREATE OPERATOR < (
LEFTARG = _rrule.RRULESET[],
RIGHTARG = TIMESTAMP,
PROCEDURE = _rrule.rruleset_array_has_before_timestamp
);
-- rruleset > timestamp: the set still has occurrences after the timestamp.
CREATE OPERATOR > (
LEFTARG = _rrule.RRULESET,
RIGHTARG = TIMESTAMP,
PROCEDURE = _rrule.rruleset_has_after_timestamp
);
-- rruleset < timestamp: the set had occurrences before the timestamp.
CREATE OPERATOR < (
LEFTARG = _rrule.RRULESET,
RIGHTARG = TIMESTAMP,
PROCEDURE = _rrule.rruleset_has_before_timestamp
);
-- Implicit casts so plain TEXT / jsonb literals can be used wherever RRULE
-- or RRULESET values are expected.
-- NOTE(review): AS IMPLICIT casts from TEXT can fire in unexpected contexts;
-- PostgreSQL documentation advises conservatism with implicit casts --
-- confirm this ergonomics trade-off is intentional.
-- NOTE(review): the trailing "| the_stack" token on the last line is
-- dataset-export residue, not SQL; it is preserved here byte-for-byte.
CREATE CAST (TEXT AS _rrule.RRULE)
WITH FUNCTION _rrule.rrule(TEXT)
AS IMPLICIT;
CREATE CAST (TEXT AS _rrule.RRULESET)
WITH FUNCTION _rrule.rruleset(TEXT)
AS IMPLICIT;
CREATE CAST (jsonb AS _rrule.RRULE)
WITH FUNCTION _rrule.jsonb_to_rrule(jsonb)
AS IMPLICIT;
CREATE CAST (_rrule.RRULE AS jsonb)
WITH FUNCTION _rrule.rrule_to_jsonb(_rrule.RRULE)
AS IMPLICIT; | the_stack
-- Auto-generated metasfresh application-dictionary migration log.
-- This run registers process M_Product_Create_Mappings_Process (540657) with
-- its parameters AD_Org_Target_ID (540897) and M_Product_Target_ID (540898),
-- plus the supporting AD_Element / AD_Reference / AD_Ref_Table rows.
-- Statements are append-only history; do not edit them retroactively.
-- 12.02.2016 18:09
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process (AccessLevel,AD_Client_ID,AD_Org_ID,AD_Process_ID,AllowProcessReRun,Classname,CopyFromProcess,Created,CreatedBy,EntityType,IsActive,IsBetaFunctionality,IsDirectPrint,IsOneInstanceOnly,IsReport,IsServerProcess,LockWaitTimeout,Name,RefreshAllAfterExecution,ShowHelp,Statistic_Count,Statistic_Seconds,Type,Updated,UpdatedBy,Value) VALUES ('7',0,0,540657,'Y','de.metas.product.process.M_Product_Create_Mappings_Process','N',TO_TIMESTAMP('2016-02-12 18:09:12','YYYY-MM-DD HH24:MI:SS'),100,'de.metas.product','Y','N','N','N','N','N',0,'M_Product_Create_Mappings_Process','N','Y',0,0,'Java',TO_TIMESTAMP('2016-02-12 18:09:12','YYYY-MM-DD HH24:MI:SS'),100,'10000002')
;
-- 12.02.2016 18:09
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Trl (AD_Language,AD_Process_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Process_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Process t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Process_ID=540657 AND NOT EXISTS (SELECT * FROM AD_Process_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Process_ID=t.AD_Process_ID)
;
-- 12.02.2016 18:10
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Element (AD_Client_ID,AD_Element_ID,AD_Org_ID,ColumnName,Created,CreatedBy,EntityType,IsActive,Name,PrintName,Updated,UpdatedBy) VALUES (0,542976,0,'AD_Org_Target_ID',TO_TIMESTAMP('2016-02-12 18:10:25','YYYY-MM-DD HH24:MI:SS'),100,'de.metas.product','Y','AD_Org_Target_ID','AD_Org_Target_ID',TO_TIMESTAMP('2016-02-12 18:10:25','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 12.02.2016 18:10
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Element_Trl (AD_Language,AD_Element_ID, Description,Help,Name,PO_Description,PO_Help,PO_Name,PO_PrintName,PrintName, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Element_ID, t.Description,t.Help,t.Name,t.PO_Description,t.PO_Help,t.PO_Name,t.PO_PrintName,t.PrintName, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Element t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Element_ID=542976 AND NOT EXISTS (SELECT * FROM AD_Element_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Element_ID=t.AD_Element_ID)
;
-- 12.02.2016 18:12
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Reference (AD_Client_ID,AD_Org_ID,AD_Reference_ID,Created,CreatedBy,EntityType,IsActive,IsOrderByValue,Name,Updated,UpdatedBy,ValidationType) VALUES (0,0,540637,TO_TIMESTAMP('2016-02-12 18:12:05','YYYY-MM-DD HH24:MI:SS'),100,'de.metas.product','Y','N','AD_Org_Different_From_Current',TO_TIMESTAMP('2016-02-12 18:12:05','YYYY-MM-DD HH24:MI:SS'),100,'T')
;
-- 12.02.2016 18:12
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Reference_Trl (AD_Language,AD_Reference_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Reference_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Reference t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Reference_ID=540637 AND NOT EXISTS (SELECT * FROM AD_Reference_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Reference_ID=t.AD_Reference_ID)
;
-- 12.02.2016 18:13
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Ref_Table (AD_Client_ID,AD_Display,AD_Key,AD_Org_ID,AD_Reference_ID,AD_Table_ID,Created,CreatedBy,EntityType,IsActive,IsValueDisplayed,OrderByClause,Updated,UpdatedBy,WhereClause) VALUES (0,522,528,0,540637,155,TO_TIMESTAMP('2016-02-12 18:13:02','YYYY-MM-DD HH24:MI:SS'),100,'de.metas.product','Y','N','AD_Org.Value',TO_TIMESTAMP('2016-02-12 18:13:02','YYYY-MM-DD HH24:MI:SS'),100,NULL)
;
-- 12.02.2016 18:14
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Ref_Table SET WhereClause='AD_Org.AD_Org_ID<>@AD_Org_ID/-1@',Updated=TO_TIMESTAMP('2016-02-12 18:14:06','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Reference_ID=540637
;
-- 12.02.2016 18:14
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Para (AD_Client_ID,AD_Element_ID,AD_Org_ID,AD_Process_ID,AD_Process_Para_ID,AD_Reference_ID,AD_Reference_Value_ID,ColumnName,Created,CreatedBy,EntityType,FieldLength,IsActive,IsAutocomplete,IsCentrallyMaintained,IsEncrypted,IsMandatory,IsRange,Name,SeqNo,Updated,UpdatedBy) VALUES (0,542976,0,540657,540897,18,540637,'AD_Org_Target_ID',TO_TIMESTAMP('2016-02-12 18:14:22','YYYY-MM-DD HH24:MI:SS'),100,'de.metas.product',0,'Y','N','Y','N','N','N','AD_Org_Target_ID',10,TO_TIMESTAMP('2016-02-12 18:14:22','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 12.02.2016 18:14
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Para_Trl (AD_Language,AD_Process_Para_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Process_Para_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Process_Para t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Process_Para_ID=540897 AND NOT EXISTS (SELECT * FROM AD_Process_Para_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Process_Para_ID=t.AD_Process_Para_ID)
;
-- 12.02.2016 18:14
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET IsMandatory='Y',Updated=TO_TIMESTAMP('2016-02-12 18:14:28','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540897
;
-- 12.02.2016 18:15
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Element (AD_Client_ID,AD_Element_ID,AD_Org_ID,ColumnName,Created,CreatedBy,EntityType,IsActive,Name,PrintName,Updated,UpdatedBy) VALUES (0,542977,0,'M_Product_Target_ID',TO_TIMESTAMP('2016-02-12 18:15:06','YYYY-MM-DD HH24:MI:SS'),100,'de.metas.product','Y','M_Product_Target_ID','M_Product_Target_ID',TO_TIMESTAMP('2016-02-12 18:15:06','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 12.02.2016 18:15
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Element_Trl (AD_Language,AD_Element_ID, Description,Help,Name,PO_Description,PO_Help,PO_Name,PO_PrintName,PrintName, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Element_ID, t.Description,t.Help,t.Name,t.PO_Description,t.PO_Help,t.PO_Name,t.PO_PrintName,t.PrintName, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Element t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Element_ID=542977 AND NOT EXISTS (SELECT * FROM AD_Element_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Element_ID=t.AD_Element_ID)
;
-- 12.02.2016 18:15
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Para (AD_Client_ID,AD_Element_ID,AD_Org_ID,AD_Process_ID,AD_Process_Para_ID,AD_Reference_ID,ColumnName,Created,CreatedBy,EntityType,FieldLength,IsActive,IsAutocomplete,IsCentrallyMaintained,IsEncrypted,IsMandatory,IsRange,Name,SeqNo,Updated,UpdatedBy) VALUES (0,542977,0,540657,540898,18,'M_Product_Target_ID',TO_TIMESTAMP('2016-02-12 18:15:34','YYYY-MM-DD HH24:MI:SS'),100,'de.metas.product',0,'Y','N','Y','N','N','N','M_Product_Target_ID',20,TO_TIMESTAMP('2016-02-12 18:15:34','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 12.02.2016 18:15
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Para_Trl (AD_Language,AD_Process_Para_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Process_Para_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Process_Para t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Process_Para_ID=540898 AND NOT EXISTS (SELECT * FROM AD_Process_Para_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Process_Para_ID=t.AD_Process_Para_ID)
;
-- 12.02.2016 18:16
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Reference (AD_Client_ID,AD_Org_ID,AD_Reference_ID,Created,CreatedBy,EntityType,IsActive,IsOrderByValue,Name,Updated,UpdatedBy,ValidationType) VALUES (0,0,540638,TO_TIMESTAMP('2016-02-12 18:16:18','YYYY-MM-DD HH24:MI:SS'),100,'de.metas.product','Y','N','M_Product_Of_Org',TO_TIMESTAMP('2016-02-12 18:16:18','YYYY-MM-DD HH24:MI:SS'),100,'T')
;
-- 12.02.2016 18:16
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Reference_Trl (AD_Language,AD_Reference_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Reference_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Reference t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Reference_ID=540638 AND NOT EXISTS (SELECT * FROM AD_Reference_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Reference_ID=t.AD_Reference_ID)
;
-- 12.02.2016 18:17
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Ref_Table (AD_Client_ID,AD_Key,AD_Org_ID,AD_Reference_ID,AD_Table_ID,Created,CreatedBy,EntityType,IsActive,IsValueDisplayed,Updated,UpdatedBy) VALUES (0,1402,0,540638,208,TO_TIMESTAMP('2016-02-12 18:17:00','YYYY-MM-DD HH24:MI:SS'),100,'de.metas.product','Y','N',TO_TIMESTAMP('2016-02-12 18:17:00','YYYY-MM-DD HH24:MI:SS'),100)
;
-- Migration-log continuation: registers process
-- M_Product_Remove_Mapping_Process (540658) and experiments with an
-- M_Product_ID parameter (540899) that is deleted again at the end.
-- 15.02.2016 11:20
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET AD_Reference_Value_ID=540511,Updated=TO_TIMESTAMP('2016-02-15 11:20:14','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540898
;
-- 15.02.2016 11:20
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
-- NOTE(review): Classname below is a source-file path, not a fully-qualified
-- Java class name (compare the de.metas.product.process.* value used for
-- process 540657 above) -- presumably corrected in a later migration; verify.
INSERT INTO AD_Process (AccessLevel,AD_Client_ID,AD_Org_ID,AD_Process_ID,AllowProcessReRun,Classname,CopyFromProcess,Created,CreatedBy,EntityType,IsActive,IsBetaFunctionality,IsDirectPrint,IsOneInstanceOnly,IsReport,IsServerProcess,LockWaitTimeout,Name,RefreshAllAfterExecution,ShowHelp,Statistic_Count,Statistic_Seconds,Type,Updated,UpdatedBy,Value) VALUES ('7',0,0,540658,'Y','/de.metas.adempiere.adempiere.base/src/main/java/de/metas/product/process/M_Product_Remove_Mapping_Process.java','N',TO_TIMESTAMP('2016-02-15 11:20:46','YYYY-MM-DD HH24:MI:SS'),100,'de.metas.product','Y','N','N','N','N','N',0,'M_Product_Remove_Mapping_Process','N','Y',0,0,'Java',TO_TIMESTAMP('2016-02-15 11:20:46','YYYY-MM-DD HH24:MI:SS'),100,'10000003')
;
-- 15.02.2016 11:20
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Trl (AD_Language,AD_Process_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Process_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Process t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Process_ID=540658 AND NOT EXISTS (SELECT * FROM AD_Process_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Process_ID=t.AD_Process_ID)
;
-- 16.02.2016 11:34
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Para (AD_Client_ID,AD_Element_ID,AD_Org_ID,AD_Process_ID,AD_Process_Para_ID,AD_Reference_ID,ColumnName,Created,CreatedBy,DefaultValue,Description,DisplayLogic,EntityType,FieldLength,Help,IsActive,IsAutocomplete,IsCentrallyMaintained,IsEncrypted,IsMandatory,IsRange,Name,SeqNo,Updated,UpdatedBy) VALUES (0,454,0,540657,540899,19,'M_Product_ID',TO_TIMESTAMP('2016-02-16 11:34:45','YYYY-MM-DD HH24:MI:SS'),100,'@M_Product_ID@','Produkt, Leistung, Artikel','1=2','de.metas.product',0,'Bezeichnet eine Einheit, die in dieser Organisation gekauft oder verkauft wird.','Y','N','Y','N','Y','N','Produkt',30,TO_TIMESTAMP('2016-02-16 11:34:45','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 16.02.2016 11:34
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Para_Trl (AD_Language,AD_Process_Para_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Process_Para_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Process_Para t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Process_Para_ID=540899 AND NOT EXISTS (SELECT * FROM AD_Process_Para_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Process_Para_ID=t.AD_Process_Para_ID)
;
-- 16.02.2016 11:53
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET DisplayLogic=NULL, ReadOnlyLogic='1=1',Updated=TO_TIMESTAMP('2016-02-16 11:53:49','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540899
;
-- 16.02.2016 11:53
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET IsActive='Y', SeqNo=10,Updated=TO_TIMESTAMP('2016-02-16 11:53:56','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540899
;
-- 16.02.2016 11:53
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET IsActive='Y', SeqNo=20,Updated=TO_TIMESTAMP('2016-02-16 11:53:56','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540897
;
-- 16.02.2016 11:53
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET IsActive='Y', SeqNo=30,Updated=TO_TIMESTAMP('2016-02-16 11:53:56','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540898
;
-- 16.02.2016 11:56
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET DefaultValue='M_Product.M_Product_ID',Updated=TO_TIMESTAMP('2016-02-16 11:56:44','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540899
;
-- 16.02.2016 12:01
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Process_Para_Trl WHERE AD_Process_Para_ID=540899
;
-- 16.02.2016 12:01
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Process_Para WHERE AD_Process_Para_ID=540899
;
-- Migration-log continuation: iterates on tab 540722's parent/column links
-- and creates parameter M_Product_ID (540900) plus reference M_Product_Ctx
-- (540639) with its ref-table binding.
-- 16.02.2016 13:08
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Tab SET Parent_Column_ID=1402,Updated=TO_TIMESTAMP('2016-02-16 13:08:43','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Tab_ID=540722
;
-- 16.02.2016 13:10
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Tab SET AD_Column_ID=553167, Parent_Column_ID=NULL,Updated=TO_TIMESTAMP('2016-02-16 13:10:03','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Tab_ID=540722
;
-- 16.02.2016 13:18
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Tab SET Parent_Column_ID=1402,Updated=TO_TIMESTAMP('2016-02-16 13:18:48','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Tab_ID=540722
;
-- 16.02.2016 13:21
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Para (AD_Client_ID,AD_Element_ID,AD_Org_ID,AD_Process_ID,AD_Process_Para_ID,AD_Reference_ID,ColumnName,Created,CreatedBy,DefaultValue,Description,DisplayLogic,EntityType,FieldLength,Help,IsActive,IsAutocomplete,IsCentrallyMaintained,IsEncrypted,IsMandatory,IsRange,Name,SeqNo,Updated,UpdatedBy) VALUES (0,454,0,540657,540900,19,'M_Product_ID',TO_TIMESTAMP('2016-02-16 13:21:30','YYYY-MM-DD HH24:MI:SS'),100,'@M_Product_ID@','Produkt, Leistung, Artikel','1=1','de.metas.product',0,'Bezeichnet eine Einheit, die in dieser Organisation gekauft oder verkauft wird.','Y','N','Y','N','Y','N','Produkt',40,TO_TIMESTAMP('2016-02-16 13:21:30','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 16.02.2016 13:21
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Para_Trl (AD_Language,AD_Process_Para_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Process_Para_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Process_Para t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Process_Para_ID=540900 AND NOT EXISTS (SELECT * FROM AD_Process_Para_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Process_Para_ID=t.AD_Process_Para_ID)
;
-- 16.02.2016 13:28
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET DisplayLogic='1=2',Updated=TO_TIMESTAMP('2016-02-16 13:28:34','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540900
;
-- 16.02.2016 13:30
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET DisplayLogic=NULL, ReadOnlyLogic='1=1',Updated=TO_TIMESTAMP('2016-02-16 13:30:06','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540900
;
-- 16.02.2016 13:33
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Tab SET AD_Column_ID=NULL,Updated=TO_TIMESTAMP('2016-02-16 13:33:56','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Tab_ID=540722
;
-- 16.02.2016 13:38
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Tab SET AD_Column_ID=553167, Parent_Column_ID=NULL,Updated=TO_TIMESTAMP('2016-02-16 13:38:30','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Tab_ID=540722
;
-- 16.02.2016 13:43
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET AD_Reference_ID=30, AD_Val_Rule_ID=231,Updated=TO_TIMESTAMP('2016-02-16 13:43:39','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540900
;
-- 16.02.2016 13:45
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Reference (AD_Client_ID,AD_Org_ID,AD_Reference_ID,Created,CreatedBy,EntityType,IsActive,IsOrderByValue,Name,Updated,UpdatedBy,ValidationType) VALUES (0,0,540639,TO_TIMESTAMP('2016-02-16 13:45:24','YYYY-MM-DD HH24:MI:SS'),100,'de.metas.product','Y','N','M_Product_Ctx',TO_TIMESTAMP('2016-02-16 13:45:24','YYYY-MM-DD HH24:MI:SS'),100,'T')
;
-- 16.02.2016 13:45
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Reference_Trl (AD_Language,AD_Reference_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Reference_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Reference t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Reference_ID=540639 AND NOT EXISTS (SELECT * FROM AD_Reference_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Reference_ID=t.AD_Reference_ID)
;
-- 16.02.2016 13:45
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Ref_Table (AD_Client_ID,AD_Display,AD_Key,AD_Org_ID,AD_Reference_ID,AD_Table_ID,Created,CreatedBy,EntityType,IsActive,IsValueDisplayed,Updated,UpdatedBy) VALUES (0,1410,1402,0,540639,208,TO_TIMESTAMP('2016-02-16 13:45:51','YYYY-MM-DD HH24:MI:SS'),100,'de.metas.product','Y','N',TO_TIMESTAMP('2016-02-16 13:45:51','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 16.02.2016 13:46
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
-- NOTE(review): the WhereClause below uses 'from M_Product p on ...' ('on'
-- without a join is invalid SQL); it is corrected to 'where' by the 14:27
-- update further down this log -- this statement is kept as recorded history.
UPDATE AD_Ref_Table SET WhereClause='exists
(select 1 from M_Product p on M_Product.M_Product_ID = p.M_Product_ID)',Updated=TO_TIMESTAMP('2016-02-16 13:46:21','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Reference_ID=540639
;
-- Migration-log continuation: further experiments with parameter 540900's
-- reference/default value and tab 540722's linkage; parameter 540900 is
-- deleted again at the end of this run.
-- 16.02.2016 13:46
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET AD_Reference_Value_ID=540639, AD_Val_Rule_ID=NULL,Updated=TO_TIMESTAMP('2016-02-16 13:46:46','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540900
;
-- 16.02.2016 13:47
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET DefaultValue='select M_Product.M_Product_ID',Updated=TO_TIMESTAMP('2016-02-16 13:47:23','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540900
;
-- 16.02.2016 13:48
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET AD_Reference_Value_ID=NULL,Updated=TO_TIMESTAMP('2016-02-16 13:48:55','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540900
;
-- 16.02.2016 13:50
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET AD_Reference_Value_ID=540272, DefaultValue='@M_Product_ID@',Updated=TO_TIMESTAMP('2016-02-16 13:50:07','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540900
;
-- 16.02.2016 13:52
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Tab SET AD_Column_ID=NULL, Parent_Column_ID=1402,Updated=TO_TIMESTAMP('2016-02-16 13:52:23','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Tab_ID=540722
;
-- 16.02.2016 13:53
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Tab SET AD_Column_ID=553167,Updated=TO_TIMESTAMP('2016-02-16 13:53:15','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Tab_ID=540722
;
-- 16.02.2016 14:26
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET IsMandatory='N',Updated=TO_TIMESTAMP('2016-02-16 14:26:39','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540900
;
-- 16.02.2016 14:27
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Ref_Table SET WhereClause='exists
(select 1 from M_Product p where M_Product.M_Product_ID = p.M_Product_ID)',Updated=TO_TIMESTAMP('2016-02-16 14:27:58','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Reference_ID=540639
;
-- 16.02.2016 14:28
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET AD_Reference_Value_ID=540639,Updated=TO_TIMESTAMP('2016-02-16 14:28:12','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540900
;
-- 16.02.2016 14:30
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Tab SET Parent_Column_ID=NULL,Updated=TO_TIMESTAMP('2016-02-16 14:30:18','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Tab_ID=540722
;
-- 16.02.2016 14:41
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Tab SET IsCheckParentsChanged='Y', IsSearchActive='Y',Updated=TO_TIMESTAMP('2016-02-16 14:41:06','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Tab_ID=540722
;
-- 16.02.2016 14:41
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Tab SET Parent_Column_ID=1402,Updated=TO_TIMESTAMP('2016-02-16 14:41:21','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Tab_ID=540722
;
-- 16.02.2016 14:53
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Tab SET Parent_Column_ID=NULL,Updated=TO_TIMESTAMP('2016-02-16 14:53:48','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Tab_ID=540722
;
-- 16.02.2016 14:54
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Tab SET Parent_Column_ID=1402,Updated=TO_TIMESTAMP('2016-02-16 14:54:07','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Tab_ID=540722
;
-- 16.02.2016 14:55
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET DefaultValue='@#M_Product_ID@',Updated=TO_TIMESTAMP('2016-02-16 14:55:06','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540900
;
-- 16.02.2016 14:56
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
-- NOTE(review): 'form' below looks like a typo for 'from'; harmless in the
-- end because parameter 540900 is deleted a few statements later.
UPDATE AD_Process_Para SET DefaultValue='@SQL = SELECT M_Product_ID form M_Product',Updated=TO_TIMESTAMP('2016-02-16 14:56:58','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540900
;
-- 16.02.2016 15:36
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Process_Para_Trl WHERE AD_Process_Para_ID=540900
;
-- 16.02.2016 15:36
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Process_Para WHERE AD_Process_Para_ID=540900
;
-- Migration-log continuation: rebinds process 540657 to table 208, renames
-- reference 540638 to M_Product_Of_Org_Not_Mapped, finalizes the target-org
-- parameter (540898), and creates validation rule M_Product_Org_NotMapped
-- (540318).
-- 16.02.2016 15:37
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Table_Process WHERE AD_Process_ID=540657 AND AD_Table_ID=540704
;
-- 16.02.2016 15:37
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Table_Process (AD_Client_ID,AD_Org_ID,AD_Process_ID,AD_Table_ID,Created,CreatedBy,EntityType,IsActive,Updated,UpdatedBy) VALUES (0,0,540657,208,TO_TIMESTAMP('2016-02-16 15:37:25','YYYY-MM-DD HH24:MI:SS'),100,'de.metas.product','Y',TO_TIMESTAMP('2016-02-16 15:37:25','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 16.02.2016 15:38
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Reference SET Name='M_Product_Of_Org_Not_Mapped',Updated=TO_TIMESTAMP('2016-02-16 15:38:26','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Reference_ID=540638
;
-- 16.02.2016 15:38
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Reference_Trl SET IsTranslated='N' WHERE AD_Reference_ID=540638
;
-- 16.02.2016 15:39
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET AD_Reference_Value_ID=540638,Updated=TO_TIMESTAMP('2016-02-16 15:39:05','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540898
;
-- 16.02.2016 15:40
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Ref_Table SET WhereClause='M_Product.AD_Org_ID = @AD_Org_Target_ID@ AND M_Product.M_Product_Mapping_ID IS NULL',Updated=TO_TIMESTAMP('2016-02-16 15:40:18','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Reference_ID=540638
;
-- 16.02.2016 15:42
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Ref_Table SET WhereClause='M_Product.AD_Org_ID = @AD_Org_Target_ID/-1@ AND M_Product.M_Product_Mapping_ID IS NULL',Updated=TO_TIMESTAMP('2016-02-16 15:42:30','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Reference_ID=540638
;
-- 16.02.2016 15:45
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Ref_Table SET AD_Display=1410,Updated=TO_TIMESTAMP('2016-02-16 15:45:27','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Reference_ID=540638
;
-- 16.02.2016 15:51
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Ref_Table SET WhereClause='M_Product.AD_Org_ID = @AD_Org_Target_ID/-1@ ',Updated=TO_TIMESTAMP('2016-02-16 15:51:19','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Reference_ID=540638
;
-- 16.02.2016 15:57
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET IsMandatory='Y',Updated=TO_TIMESTAMP('2016-02-16 15:57:10','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540898
;
-- 16.02.2016 15:57
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET DisplayLogic='@AD_Org_Target_ID@ > 0',Updated=TO_TIMESTAMP('2016-02-16 15:57:28','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540898
;
-- 16.02.2016 15:59
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET AD_Reference_Value_ID=540272,Updated=TO_TIMESTAMP('2016-02-16 15:59:30','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540898
;
-- 16.02.2016 16:00
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Val_Rule (AD_Client_ID,AD_Org_ID,AD_Val_Rule_ID,Created,CreatedBy,EntityType,IsActive,Name,Type,Updated,UpdatedBy) VALUES (0,0,540318,TO_TIMESTAMP('2016-02-16 16:00:30','YYYY-MM-DD HH24:MI:SS'),100,'de.metas.product','Y','M_Product_Org_NotMapped','S',TO_TIMESTAMP('2016-02-16 16:00:30','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 16.02.2016 16:00
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Val_Rule SET Code='M_Product.AD_Org_ID = @AD_Org_Target_ID/-1@ AND M_Product.M_Product_Mapping_ID IS NULL',Updated=TO_TIMESTAMP('2016-02-16 16:00:50','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Val_Rule_ID=540318
;
-- 16.02.2016 16:01
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET AD_Val_Rule_ID=540318,Updated=TO_TIMESTAMP('2016-02-16 16:01:03','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540898
;
-- 16.02.2016 16:02
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET AD_Reference_ID=30, AD_Reference_Value_ID=NULL,Updated=TO_TIMESTAMP('2016-02-16 16:02:29','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540898
;
-- 16.02.2016 16:02
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET AD_Reference_Value_ID=540272,Updated=TO_TIMESTAMP('2016-02-16 16:02:46','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540898
;
-- 16.02.2016 16:04
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Val_Rule SET Code='M_Product.AD_Org_ID = @AD_Org_Target_ID/-1@',Updated=TO_TIMESTAMP('2016-02-16 16:04:06','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Val_Rule_ID=540318
;
-- 16.02.2016 16:06
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET IsAutocomplete='Y',Updated=TO_TIMESTAMP('2016-02-16 16:06:11','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540898
;
-- 16.02.2016 16:11
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Val_Rule SET Code='M_Product.AD_Org_ID = @AD_Org_Target_ID/-1@
AND
(M_Product.M_Product_Mapping_ID IS NULL OR not exists
(select 1 from M_Product p where p.AD_Org_ID = @AD_Org_ID@ and p.M_Product_Mapping_ID =M_Product.M_Product_Mapping_ID ))',Updated=TO_TIMESTAMP('2016-02-16 16:11:37','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Val_Rule_ID=540318
;
-- 16.02.2016 16:38
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Reference_Trl WHERE AD_Reference_ID=540638
;
-- 16.02.2016 16:38
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Reference WHERE AD_Reference_ID=540638
;
-- 16.02.2016 16:39
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET DisplayLogic=NULL,Updated=TO_TIMESTAMP('2016-02-16 16:39:10','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540898
;
-- 16.02.2016 17:58
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Para (AD_Client_ID,AD_Element_ID,AD_Org_ID,AD_Process_ID,AD_Process_Para_ID,AD_Reference_ID,ColumnName,Created,CreatedBy,EntityType,FieldLength,IsActive,IsAutocomplete,IsCentrallyMaintained,IsEncrypted,IsMandatory,IsRange,Name,SeqNo,Updated,UpdatedBy) VALUES (0,542976,0,540658,540901,19,'AD_Org_Target_ID',TO_TIMESTAMP('2016-02-16 17:58:34','YYYY-MM-DD HH24:MI:SS'),100,'de.metas.product',0,'Y','N','Y','N','N','N','AD_Org_Target_ID',10,TO_TIMESTAMP('2016-02-16 17:58:34','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 16.02.2016 17:58
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Para_Trl (AD_Language,AD_Process_Para_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Process_Para_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Process_Para t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Process_Para_ID=540901 AND NOT EXISTS (SELECT * FROM AD_Process_Para_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Process_Para_ID=t.AD_Process_Para_ID)
;
-- 16.02.2016 17:59
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET AD_Reference_Value_ID=540637,Updated=TO_TIMESTAMP('2016-02-16 17:59:25','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540901
;
-- 16.02.2016 18:07
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Reference (AD_Client_ID,AD_Org_ID,AD_Reference_ID,Created,CreatedBy,EntityType,IsActive,IsOrderByValue,Name,Updated,UpdatedBy,ValidationType) VALUES (0,0,540640,TO_TIMESTAMP('2016-02-16 18:07:55','YYYY-MM-DD HH24:MI:SS'),100,'de.metas.product','Y','N','AD_Org_Mapped_Products',TO_TIMESTAMP('2016-02-16 18:07:55','YYYY-MM-DD HH24:MI:SS'),100,'T')
;
-- 16.02.2016 18:07
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Reference_Trl (AD_Language,AD_Reference_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Reference_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Reference t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Reference_ID=540640 AND NOT EXISTS (SELECT * FROM AD_Reference_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Reference_ID=t.AD_Reference_ID)
;
-- 16.02.2016 18:08
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Ref_Table (AD_Client_ID,AD_Display,AD_Key,AD_Org_ID,AD_Reference_ID,AD_Table_ID,Created,CreatedBy,EntityType,IsActive,IsValueDisplayed,Updated,UpdatedBy) VALUES (0,522,528,0,540640,155,TO_TIMESTAMP('2016-02-16 18:08:21','YYYY-MM-DD HH24:MI:SS'),100,'de.metas.product','Y','N',TO_TIMESTAMP('2016-02-16 18:08:21','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 16.02.2016 18:09
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Ref_Table SET WhereClause='exists
(
select 1 from M_Product p where p.AD_Org_ID = AD_Org.AD_Org_ID
and p.M_Product_Mapping_ID = @M_Product_Mapping_ID@
)',Updated=TO_TIMESTAMP('2016-02-16 18:09:25','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Reference_ID=540640
;
-- 16.02.2016 18:10
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET AD_Reference_Value_ID=540640,Updated=TO_TIMESTAMP('2016-02-16 18:10:10','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540901
;
-- 16.02.2016 18:11
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Reference (AD_Client_ID,AD_Org_ID,AD_Reference_ID,Created,CreatedBy,EntityType,IsActive,IsOrderByValue,Name,Updated,UpdatedBy,ValidationType) VALUES (0,0,540641,TO_TIMESTAMP('2016-02-16 18:11:24','YYYY-MM-DD HH24:MI:SS'),100,'de.metas.product','Y','N','M_Product_With_Mapping',TO_TIMESTAMP('2016-02-16 18:11:24','YYYY-MM-DD HH24:MI:SS'),100,'T')
;
-- 16.02.2016 18:11
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Reference_Trl (AD_Language,AD_Reference_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Reference_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Reference t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Reference_ID=540641 AND NOT EXISTS (SELECT * FROM AD_Reference_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Reference_ID=t.AD_Reference_ID)
;
-- 16.02.2016 18:11
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Ref_Table (AD_Client_ID,AD_Display,AD_Key,AD_Org_ID,AD_Reference_ID,AD_Table_ID,Created,CreatedBy,EntityType,IsActive,IsValueDisplayed,Updated,UpdatedBy) VALUES (0,1410,1402,0,540641,208,TO_TIMESTAMP('2016-02-16 18:11:51','YYYY-MM-DD HH24:MI:SS'),100,'de.metas.product','Y','N',TO_TIMESTAMP('2016-02-16 18:11:51','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 16.02.2016 18:12
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Ref_Table SET WhereClause='M_Product.AD_Org_ID = @AD_Org_Target_ID@ and M_Product.M_Product_Mapping_ID = @M_Product_Mapping_ID@',Updated=TO_TIMESTAMP('2016-02-16 18:12:33','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Reference_ID=540641
;
-- 16.02.2016 18:12
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Para (AD_Client_ID,AD_Element_ID,AD_Org_ID,AD_Process_ID,AD_Process_Para_ID,AD_Reference_ID,AD_Reference_Value_ID,ColumnName,Created,CreatedBy,EntityType,FieldLength,IsActive,IsAutocomplete,IsCentrallyMaintained,IsEncrypted,IsMandatory,IsRange,Name,SeqNo,Updated,UpdatedBy) VALUES (0,542977,0,540658,540902,18,540641,'M_Product_Target_ID',TO_TIMESTAMP('2016-02-16 18:12:48','YYYY-MM-DD HH24:MI:SS'),100,'de.metas.product',0,'Y','N','Y','N','N','N','M_Product_Target_ID',20,TO_TIMESTAMP('2016-02-16 18:12:48','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 16.02.2016 18:12
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Para_Trl (AD_Language,AD_Process_Para_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Process_Para_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Process_Para t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Process_Para_ID=540902 AND NOT EXISTS (SELECT * FROM AD_Process_Para_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Process_Para_ID=t.AD_Process_Para_ID)
;
-- 16.02.2016 18:12
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET IsMandatory='Y',Updated=TO_TIMESTAMP('2016-02-16 18:12:50','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540902
;
-- 16.02.2016 18:12
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET IsMandatory='Y',Updated=TO_TIMESTAMP('2016-02-16 18:12:53','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540901
;
-- 16.02.2016 18:14
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process SET Classname='de.metas.product.process.M_Product_Remove_Mapping_Process',Updated=TO_TIMESTAMP('2016-02-16 18:14:52','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_ID=540658
;
-- 16.02.2016 18:16
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET AD_Reference_ID=18,Updated=TO_TIMESTAMP('2016-02-16 18:16:22','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540901
;
-- 16.02.2016 18:17
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Ref_Table SET WhereClause='M_Product.AD_Org_ID = @AD_Org_Target_ID/1@ and M_Product.M_Product_Mapping_ID = @M_Product_Mapping_ID@',Updated=TO_TIMESTAMP('2016-02-16 18:17:19','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Reference_ID=540641
;
-- 16.02.2016 18:17
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Ref_Table SET WhereClause='M_Product.AD_Org_ID = @AD_Org_Target_ID/-1@ and M_Product.M_Product_Mapping_ID = @M_Product_Mapping_ID@',Updated=TO_TIMESTAMP('2016-02-16 18:17:42','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Reference_ID=540641
;
-- 16.02.2016 18:18
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Ref_Table SET WhereClause='M_Product.AD_Org_ID = @AD_Org_Target_ID/-1@',Updated=TO_TIMESTAMP('2016-02-16 18:18:54','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Reference_ID=540641
;
-- 16.02.2016 18:19
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Ref_Table SET WhereClause='exists
(
select 1 from M_Product p where p.AD_Org_ID = AD_Org.AD_Org_ID
and p.M_Product_Mapping_ID = @M_Product_Mapping_ID@
)
and AD_Org.AD_Org_ID <> @AD_Org_ID@',Updated=TO_TIMESTAMP('2016-02-16 18:19:46','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Reference_ID=540640
;
-- 16.02.2016 18:33
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET AD_Reference_Value_ID=NULL,Updated=TO_TIMESTAMP('2016-02-16 18:33:09','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540902
;
-- 16.02.2016 18:33
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Reference_Trl WHERE AD_Reference_ID=540641
;
-- 16.02.2016 18:33
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Reference WHERE AD_Reference_ID=540641
;
-- 16.02.2016 18:33
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Val_Rule (AD_Client_ID,AD_Org_ID,AD_Val_Rule_ID,Created,CreatedBy,EntityType,IsActive,Name,Type,Updated,UpdatedBy) VALUES (0,0,540319,TO_TIMESTAMP('2016-02-16 18:33:41','YYYY-MM-DD HH24:MI:SS'),100,'de.metas.product','Y','M_Product_With_Mapping','S',TO_TIMESTAMP('2016-02-16 18:33:41','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 16.02.2016 18:33
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Val_Rule SET Code='M_Product.AD_Org_ID = @AD_Org_Target_ID/-1@',Updated=TO_TIMESTAMP('2016-02-16 18:33:54','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Val_Rule_ID=540319
;
-- 16.02.2016 18:34
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET AD_Reference_Value_ID=540272,Updated=TO_TIMESTAMP('2016-02-16 18:34:09','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540902
;
-- 16.02.2016 18:34
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET AD_Val_Rule_ID=540319,Updated=TO_TIMESTAMP('2016-02-16 18:34:22','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540902
;
-- 16.02.2016 18:35
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Val_Rule SET Code='M_Product.AD_Org_ID = @AD_Org_Target_ID/-1@ and M_Product.M_Product_Mapping_ID = @M_Product_Mapping_ID@',Updated=TO_TIMESTAMP('2016-02-16 18:35:21','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Val_Rule_ID=540319
;
---- parameters not needed in the removing mapping process ( discussed with Tobi)
-- 17.02.2016 13:51
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET AD_Reference_Value_ID=NULL,Updated=TO_TIMESTAMP('2016-02-17 13:51:08','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540901
;
-- 17.02.2016 13:51
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Reference_Trl WHERE AD_Reference_ID=540640
;
-- 17.02.2016 13:51
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Reference WHERE AD_Reference_ID=540640
;
-- 17.02.2016 13:51
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Process_Para_Trl WHERE AD_Process_Para_ID=540901
;
-- 17.02.2016 13:51
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Process_Para WHERE AD_Process_Para_ID=540901
;
-- 17.02.2016 13:52
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET AD_Val_Rule_ID=NULL,Updated=TO_TIMESTAMP('2016-02-17 13:52:18','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540902
;
-- 17.02.2016 13:52
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Val_Rule WHERE AD_Val_Rule_ID=540319
;
-- 17.02.2016 13:52
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Process_Para_Trl WHERE AD_Process_Para_ID=540902
;
-- 17.02.2016 13:52
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Process_Para WHERE AD_Process_Para_ID=540902
;
-- 17.02.2016 14:25
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process SET RefreshAllAfterExecution='Y',Updated=TO_TIMESTAMP('2016-02-17 14:25:03','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_ID=540658
;
-- 17.02.2016 15:22
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Val_Rule (AD_Client_ID,AD_Org_ID,AD_Val_Rule_ID,Created,CreatedBy,EntityType,IsActive,Name,Type,Updated,UpdatedBy) VALUES (0,0,540320,TO_TIMESTAMP('2016-02-17 15:22:08','YYYY-MM-DD HH24:MI:SS'),100,'de.metas.product','Y','AD_Org_NoSameProductMapping','S',TO_TIMESTAMP('2016-02-17 15:22:08','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 17.02.2016 15:23
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Val_Rule SET Code='not exists
(
select 1 from M_Product p where p.AD_Org_ID = AD_Org.AD_Org_ID and p.M_Product_Mapping_ID = @M_Product_Mapping_ID/-1@
)',Updated=TO_TIMESTAMP('2016-02-17 15:23:02','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Val_Rule_ID=540320
;
-- 17.02.2016 15:23
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET AD_Val_Rule_ID=540320,Updated=TO_TIMESTAMP('2016-02-17 15:23:53','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540897
;
-- ============================================================================
-- metasfresh application-dictionary migration fragment (2021-06-17).
-- Auto-generated translation/terminology update for the element
-- 'BPNameAndGreetingStrategy' (AD_Element_ID 579365) and the reference
-- "Individual business partner's name format" (AD_Reference_ID 541338),
-- plus two reference-list entries (542673/542674).
-- Pattern: update AD_Element_Trl / AD_Reference_Trl rows per language, then
-- propagate into dependent dictionary tables (AD_Column, AD_Process_Para,
-- AD_Field, AD_PrintFormatItem, AD_Tab, AD_WINDOW, AD_Menu) -- partly via
-- explicit UPDATEs, partly via the update_TRL_Tables_On_AD_Element_TRL_Update
-- / update_ad_element_on_ad_element_trl_update functions (defined elsewhere
-- in the database; not visible in this file).
-- NOTE(review): one-shot generated migration -- do not reformat or reorder.
-- ============================================================================
-- 2021-06-17T09:15:00.203Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Reference SET Name='Individual business partner''s name format',Updated=TO_TIMESTAMP('2021-06-17 12:15:00','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Reference_ID=541338
;
-- 2021-06-17T09:15:15.817Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Reference_Trl SET IsTranslated='Y', Name='Namensformat der Geschäftspartnerperson',Updated=TO_TIMESTAMP('2021-06-17 12:15:15','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Language='de_CH' AND AD_Reference_ID=541338
;
-- 2021-06-17T09:15:20.045Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Reference_Trl SET IsTranslated='Y', Name='Namensformat der Geschäftspartnerperson',Updated=TO_TIMESTAMP('2021-06-17 12:15:20','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Language='de_DE' AND AD_Reference_ID=541338
;
-- 2021-06-17T09:16:49.261Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Element_Trl SET Description='Describes how the new business partner''s name is initiated, based on the added contacts'' names and forms of address.', IsTranslated='Y', Name='Individual business partner''s name format', PrintName='Individual business partner''s name format',Updated=TO_TIMESTAMP('2021-06-17 12:16:49','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Element_ID=579365 AND AD_Language='en_US'
;
-- 2021-06-17T09:16:49.279Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
/* DDL */ select update_TRL_Tables_On_AD_Element_TRL_Update(579365,'en_US')
;
-- 2021-06-17T09:17:19.116Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Element_Trl SET IsTranslated='Y', Name='Namensformat der Geschäftspartnerperson', PrintName='Namensformat der Geschäftspartnerperson',Updated=TO_TIMESTAMP('2021-06-17 12:17:19','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Element_ID=579365 AND AD_Language='de_DE'
;
-- 2021-06-17T09:17:19.119Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
/* DDL */ select update_TRL_Tables_On_AD_Element_TRL_Update(579365,'de_DE')
;
-- 2021-06-17T09:17:19.130Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
/* DDL */ select update_ad_element_on_ad_element_trl_update(579365,'de_DE')
;
-- 2021-06-17T09:17:19.132Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Column SET ColumnName='BPNameAndGreetingStrategy', Name='Namensformat der Geschäftspartnerperson', Description='Describes how the name of the new business partner is initialized, based on the names and greetings of the added contacts.', Help=NULL WHERE AD_Element_ID=579365
;
-- 2021-06-17T09:17:19.135Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET ColumnName='BPNameAndGreetingStrategy', Name='Namensformat der Geschäftspartnerperson', Description='Describes how the name of the new business partner is initialized, based on the names and greetings of the added contacts.', Help=NULL, AD_Element_ID=579365 WHERE UPPER(ColumnName)='BPNAMEANDGREETINGSTRATEGY' AND IsCentrallyMaintained='Y' AND AD_Element_ID IS NULL
;
-- 2021-06-17T09:17:19.138Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET ColumnName='BPNameAndGreetingStrategy', Name='Namensformat der Geschäftspartnerperson', Description='Describes how the name of the new business partner is initialized, based on the names and greetings of the added contacts.', Help=NULL WHERE AD_Element_ID=579365 AND IsCentrallyMaintained='Y'
;
-- 2021-06-17T09:17:19.139Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field SET Name='Namensformat der Geschäftspartnerperson', Description='Describes how the name of the new business partner is initialized, based on the names and greetings of the added contacts.', Help=NULL WHERE (AD_Column_ID IN (SELECT AD_Column_ID FROM AD_Column WHERE AD_Element_ID=579365) AND AD_Name_ID IS NULL ) OR (AD_Name_ID = 579365)
;
-- 2021-06-17T09:17:19.159Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_PrintFormatItem pi SET PrintName='Namensformat der Geschäftspartnerperson', Name='Namensformat der Geschäftspartnerperson' WHERE IsCentrallyMaintained='Y' AND EXISTS (SELECT * FROM AD_Column c WHERE c.AD_Column_ID=pi.AD_Column_ID AND c.AD_Element_ID=579365)
;
-- 2021-06-17T09:17:19.162Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Tab SET Name='Namensformat der Geschäftspartnerperson', Description='Describes how the name of the new business partner is initialized, based on the names and greetings of the added contacts.', Help=NULL, CommitWarning = NULL WHERE AD_Element_ID = 579365
;
-- 2021-06-17T09:17:19.166Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_WINDOW SET Name='Namensformat der Geschäftspartnerperson', Description='Describes how the name of the new business partner is initialized, based on the names and greetings of the added contacts.', Help=NULL WHERE AD_Element_ID = 579365
;
-- 2021-06-17T09:17:19.169Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Menu SET Name = 'Namensformat der Geschäftspartnerperson', Description = 'Describes how the name of the new business partner is initialized, based on the names and greetings of the added contacts.', WEBUI_NameBrowse = NULL, WEBUI_NameNew = NULL, WEBUI_NameNewBreadcrumb = NULL WHERE AD_Element_ID = 579365
;
-- 2021-06-17T09:17:26.859Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Element_Trl SET IsTranslated='Y', Name='Namensformat der Geschäftspartnerperson', PrintName='Namensformat der Geschäftspartnerperson',Updated=TO_TIMESTAMP('2021-06-17 12:17:26','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Element_ID=579365 AND AD_Language='de_CH'
;
-- 2021-06-17T09:17:26.861Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
/* DDL */ select update_TRL_Tables_On_AD_Element_TRL_Update(579365,'de_CH')
;
-- 2021-06-17T09:17:40.480Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Element_Trl SET Description='Beschreibt, wie der Name des neuen Geschäftspartners eingeleitet wird, basierend auf den Namen und der Anrede der hinzugefügten Kontakte.',Updated=TO_TIMESTAMP('2021-06-17 12:17:40','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Element_ID=579365 AND AD_Language='de_CH'
;
-- 2021-06-17T09:17:40.482Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
/* DDL */ select update_TRL_Tables_On_AD_Element_TRL_Update(579365,'de_CH')
;
-- 2021-06-17T09:17:42.796Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Element_Trl SET Description='Beschreibt, wie der Name des neuen Geschäftspartners eingeleitet wird, basierend auf den Namen und der Anrede der hinzugefügten Kontakte.',Updated=TO_TIMESTAMP('2021-06-17 12:17:42','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Element_ID=579365 AND AD_Language='de_DE'
;
-- 2021-06-17T09:17:42.799Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
/* DDL */ select update_TRL_Tables_On_AD_Element_TRL_Update(579365,'de_DE')
;
-- 2021-06-17T09:17:42.817Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
/* DDL */ select update_ad_element_on_ad_element_trl_update(579365,'de_DE')
;
-- The remaining statements repeat the propagation above with the German
-- (de_DE) description text written back into the shared dictionary rows.
-- 2021-06-17T09:17:42.819Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Column SET ColumnName='BPNameAndGreetingStrategy', Name='Namensformat der Geschäftspartnerperson', Description='Beschreibt, wie der Name des neuen Geschäftspartners eingeleitet wird, basierend auf den Namen und der Anrede der hinzugefügten Kontakte.', Help=NULL WHERE AD_Element_ID=579365
;
-- 2021-06-17T09:17:42.822Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET ColumnName='BPNameAndGreetingStrategy', Name='Namensformat der Geschäftspartnerperson', Description='Beschreibt, wie der Name des neuen Geschäftspartners eingeleitet wird, basierend auf den Namen und der Anrede der hinzugefügten Kontakte.', Help=NULL, AD_Element_ID=579365 WHERE UPPER(ColumnName)='BPNAMEANDGREETINGSTRATEGY' AND IsCentrallyMaintained='Y' AND AD_Element_ID IS NULL
;
-- 2021-06-17T09:17:42.824Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET ColumnName='BPNameAndGreetingStrategy', Name='Namensformat der Geschäftspartnerperson', Description='Beschreibt, wie der Name des neuen Geschäftspartners eingeleitet wird, basierend auf den Namen und der Anrede der hinzugefügten Kontakte.', Help=NULL WHERE AD_Element_ID=579365 AND IsCentrallyMaintained='Y'
;
-- 2021-06-17T09:17:42.826Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field SET Name='Namensformat der Geschäftspartnerperson', Description='Beschreibt, wie der Name des neuen Geschäftspartners eingeleitet wird, basierend auf den Namen und der Anrede der hinzugefügten Kontakte.', Help=NULL WHERE (AD_Column_ID IN (SELECT AD_Column_ID FROM AD_Column WHERE AD_Element_ID=579365) AND AD_Name_ID IS NULL ) OR (AD_Name_ID = 579365)
;
-- 2021-06-17T09:17:42.842Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Tab SET Name='Namensformat der Geschäftspartnerperson', Description='Beschreibt, wie der Name des neuen Geschäftspartners eingeleitet wird, basierend auf den Namen und der Anrede der hinzugefügten Kontakte.', Help=NULL, CommitWarning = NULL WHERE AD_Element_ID = 579365
;
-- 2021-06-17T09:17:42.847Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_WINDOW SET Name='Namensformat der Geschäftspartnerperson', Description='Beschreibt, wie der Name des neuen Geschäftspartners eingeleitet wird, basierend auf den Namen und der Anrede der hinzugefügten Kontakte.', Help=NULL WHERE AD_Element_ID = 579365
;
-- 2021-06-17T09:17:42.849Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Menu SET Name = 'Namensformat der Geschäftspartnerperson', Description = 'Beschreibt, wie der Name des neuen Geschäftspartners eingeleitet wird, basierend auf den Namen und der Anrede der hinzugefügten Kontakte.', WEBUI_NameBrowse = NULL, WEBUI_NameNew = NULL, WEBUI_NameNewBreadcrumb = NULL WHERE AD_Element_ID = 579365
;
-- 2021-06-17T09:18:10.441Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Ref_List_Trl SET IsTranslated='Y', Name='Erstkontakt',Updated=TO_TIMESTAMP('2021-06-17 12:18:10','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Language='de_CH' AND AD_Ref_List_ID=542673
;
-- 2021-06-17T09:18:15.421Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Ref_List_Trl SET IsTranslated='Y', Name='Erstkontakt',Updated=TO_TIMESTAMP('2021-06-17 12:18:15','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Language='de_DE' AND AD_Ref_List_ID=542673
;
-- 2021-06-17T09:18:29.367Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Ref_List_Trl SET IsTranslated='Y', Name='Mitgliederkontakt',Updated=TO_TIMESTAMP('2021-06-17 12:18:29','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Language='de_DE' AND AD_Ref_List_ID=542674
;
-- 2021-06-17T09:18:33.908Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Ref_List_Trl SET IsTranslated='Y', Name='Mitgliederkontakt',Updated=TO_TIMESTAMP('2021-06-17 12:18:33','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Language='de_CH' AND AD_Ref_List_ID=542674
;
-- Advent of Code 2019, day 11: stage the puzzle input (an Intcode program
-- supplied as a single comma-separated line) in a one-column table.
-- DROP IF EXISTS + CREATE makes the load idempotent across re-runs.
DROP TABLE IF EXISTS dec11;
CREATE TABLE dec11 (
program text NOT NULL
);
-- psql meta-command (client-side): fetch the personal puzzle input over
-- HTTPS; 'session.cookie' must hold a valid adventofcode.com session cookie.
\COPY dec11 (program) FROM PROGRAM 'curl -b session.cookie https://adventofcode.com/2019/day/11/input';
-- Refresh planner statistics for the freshly loaded table.
VACUUM ANALYZE dec11;
/* FIRST STAR */
/* Intcode interpreter implemented as a recursive CTE: each recursion step of
   `machine` executes exactly one instruction. Columns:
     ip       - instruction pointer into `state`
     relbase  - relative base for parameter mode 2
     output   - last paint event as 'x,y,color' (diagnostic only, not read back)
     state    - program memory as a jsonb array of bigints
     x, y     - robot position; dx, dy - heading (starts facing up: 0,-1)
     painting - true when the next OUT value is a paint color, false when a turn
     field    - jsonb object mapping 'x,y' -> last painted color */
WITH RECURSIVE
machine (ip, relbase, output, state, x, y, dx, dy, painting, field) AS (
SELECT 0,
CAST(0 AS bigint),
null::text,
to_jsonb(CAST(regexp_split_to_array(program, ',') AS bigint[])),
0, 0, 0, -1,
true,
CAST('{}' AS jsonb)
FROM dec11
UNION ALL
SELECT /* ip */
CASE opcode
WHEN 'ADD' THEN ip + 4
WHEN 'MULT' THEN ip + 4
WHEN 'IN' THEN ip + 2
WHEN 'OUT' THEN ip + 2
WHEN 'JIT' THEN CASE WHEN arg1 <> 0 THEN arg2 ELSE ip + 3 END::integer
WHEN 'JIF' THEN CASE WHEN arg1 = 0 THEN arg2 ELSE ip + 3 END::integer
WHEN 'LT' THEN ip + 4
WHEN 'EQ' THEN ip + 4
WHEN 'RBO' THEN ip + 2
ELSE
ip
END,
/* relbase */
CASE WHEN opcode = 'RBO' THEN relbase + arg1 ELSE relbase END,
/* output */
CASE WHEN opcode = 'OUT' AND painting THEN format('%s,%s,%s', x, y, arg1) END,
/* state */
CASE opcode
WHEN 'ADD' THEN jsonb_set(extended_state, path3, to_jsonb(arg1 + arg2))
WHEN 'MULT' THEN jsonb_set(extended_state, path3, to_jsonb(arg1 * arg2))
WHEN 'IN' THEN jsonb_set(extended_state, path1, coalesce(field->xy, '0'))
WHEN 'OUT' THEN state
WHEN 'JIT' THEN state
WHEN 'JIF' THEN state
WHEN 'LT' THEN jsonb_set(extended_state, path3, to_jsonb(CASE WHEN arg1 < arg2 THEN 1 ELSE 0 END))
WHEN 'EQ' THEN jsonb_set(extended_state, path3, to_jsonb(CASE WHEN arg1 = arg2 THEN 1 ELSE 0 END))
WHEN 'RBO' THEN state
WHEN 'HALT' THEN state
ELSE
state
END,
/* x */
/* On a turn output (OUT while not painting): 0 = turn left, otherwise right;
   (dx,dy) is rotated and the robot advances one cell in the new direction. */
CASE WHEN opcode = 'OUT' AND NOT painting THEN
x + CASE arg1 WHEN '0' THEN dy ELSE -dy END
ELSE
x
END,
/* y */
CASE WHEN opcode = 'OUT' AND NOT painting THEN
y + CASE arg1 WHEN '0' THEN -dx ELSE dx END
ELSE
y
END,
/* dx */
CASE WHEN opcode = 'OUT' AND NOT painting THEN
CASE arg1
WHEN '0' THEN dy
WHEN '1' THEN -dy
END
ELSE
dx
END,
/* dy */
CASE WHEN opcode = 'OUT' AND NOT painting THEN
CASE arg1
WHEN '0' THEN -dx
WHEN '1' THEN dx
END
ELSE
dy
END,
/* painting */
/* OUT alternates between paint (true) and turn (false); other opcodes keep it. */
(opcode = 'OUT') <> painting,
/* field */
CASE WHEN opcode = 'OUT' AND painting THEN
jsonb_set(field, ARRAY[xy], to_jsonb(arg1), true)
ELSE
field
END
FROM machine
/* Convenience functions */
/* Decode the current instruction: symbolic opcode, write targets (path1/path3)
   and parameter values (arg1/arg2) per the parameter-mode digits of state[ip]. */
CROSS JOIN LATERAL (VALUES (
/* opcode */
CASE CAST(state->ip AS bigint) % 100
WHEN 1 THEN 'ADD'
WHEN 2 THEN 'MULT'
WHEN 3 THEN 'IN'
WHEN 4 THEN 'OUT'
WHEN 5 THEN 'JIT'
WHEN 6 THEN 'JIF'
WHEN 7 THEN 'LT'
WHEN 8 THEN 'EQ'
WHEN 9 THEN 'RBO'
WHEN 99 THEN 'HALT'
ELSE CAST(CAST(state->ip AS bigint) % 100 AS text)
END,
/* path1 */
CASE CAST(state->ip AS bigint) / 100 % 10
WHEN 0 THEN ARRAY[CAST(state->(ip+1) AS text)]
WHEN 2 THEN ARRAY[CAST(relbase + CAST(state->(ip+1) AS integer) AS text)]
END,
/* path3 */
CASE CAST(state->ip AS bigint) / 10000 % 10
WHEN 0 THEN ARRAY[CAST(state->(ip+3) AS text)]
WHEN 2 THEN ARRAY[CAST(relbase + CAST(state->(ip+3) AS integer) AS text)]
END,
/* arg1 */
/* mode 0 = positional, 1 = immediate, 2 = relative; reads past the end of the
   jsonb array yield NULL, coalesced to 0 as the intcode spec requires. */
coalesce(
CASE CAST(state->ip AS bigint) / 100 % 10
WHEN 0 THEN CAST(state->CAST(state->(ip+1) AS integer) AS bigint)
WHEN 1 THEN CAST(state->(ip+1) AS bigint)
WHEN 2 THEN CAST(state->(relbase::integer + CAST(state->(ip+1) AS integer)) AS bigint)
END,
0),
/* arg2 */
CASE CAST(state->ip AS bigint) / 1000 % 10
WHEN 0 THEN CAST(state->CAST(state->(ip+2) AS integer) AS bigint)
WHEN 1 THEN CAST(state->(ip+2) AS bigint)
WHEN 2 THEN CAST(state->(relbase::integer + CAST(state->(ip+2) AS integer)) AS bigint)
END,
/* xy */
format('%s,%s', x, y)
)) AS v(opcode, path1, path3, arg1, arg2, xy)
/* Extend the state if needed */
/* Grow memory by appending zeros until the write target of the next
   instruction (path1/path3) is addressable inside the jsonb array. */
CROSS JOIN LATERAL (
WITH RECURSIVE
gen (state, n) AS (
VALUES (state, 0)
UNION ALL
SELECT state || jsonb '0', n+1
FROM gen
WHERE CASE WHEN opcode = 'IN' THEN
jsonb_array_length(state) < CAST(path1[1] AS bigint)
WHEN opcode IN ('ADD', 'MULT', 'LT', 'EQ') THEN
jsonb_array_length(state) < CAST(path3[1] AS bigint)
END)
SELECT state
FROM gen
ORDER BY n DESC
FETCH FIRST ROW ONLY
) AS v2(extended_state)
WHERE state <> '"ERROR"'
AND opcode <> 'HALT'
)
/* A panel gets a key in `field` the first time it is painted, so counting the
   keys of the final field gives the number of panels painted at least once. */
SELECT count(*) AS first_star
FROM machine
CROSS JOIN LATERAL jsonb_each(field)
WHERE state->ip = '99'
;
/* SECOND STAR */
/* Same intcode interpreter as the first star (see comments there); the only
   difference is the start panel '0,0' is pre-painted white ("1"), and the final
   query renders the painted field as ASCII art instead of counting panels. */
WITH RECURSIVE
machine (ip, relbase, output, state, x, y, dx, dy, painting, field) AS (
SELECT 0,
CAST(0 AS bigint),
null::text,
to_jsonb(CAST(regexp_split_to_array(program, ',') AS bigint[])),
0, 0, 0, -1,
true,
CAST('{"0,0":1}' AS jsonb)
FROM dec11
UNION ALL
SELECT /* ip */
CASE opcode
WHEN 'ADD' THEN ip + 4
WHEN 'MULT' THEN ip + 4
WHEN 'IN' THEN ip + 2
WHEN 'OUT' THEN ip + 2
WHEN 'JIT' THEN CASE WHEN arg1 <> 0 THEN arg2 ELSE ip + 3 END::integer
WHEN 'JIF' THEN CASE WHEN arg1 = 0 THEN arg2 ELSE ip + 3 END::integer
WHEN 'LT' THEN ip + 4
WHEN 'EQ' THEN ip + 4
WHEN 'RBO' THEN ip + 2
ELSE
ip
END,
/* relbase */
CASE WHEN opcode = 'RBO' THEN relbase + arg1 ELSE relbase END,
/* output */
CASE WHEN opcode = 'OUT' AND painting THEN format('%s,%s,%s', x, y, arg1) END,
/* state */
CASE opcode
WHEN 'ADD' THEN jsonb_set(extended_state, path3, to_jsonb(arg1 + arg2))
WHEN 'MULT' THEN jsonb_set(extended_state, path3, to_jsonb(arg1 * arg2))
WHEN 'IN' THEN jsonb_set(extended_state, path1, coalesce(field->xy, '0'))
WHEN 'OUT' THEN state
WHEN 'JIT' THEN state
WHEN 'JIF' THEN state
WHEN 'LT' THEN jsonb_set(extended_state, path3, to_jsonb(CASE WHEN arg1 < arg2 THEN 1 ELSE 0 END))
WHEN 'EQ' THEN jsonb_set(extended_state, path3, to_jsonb(CASE WHEN arg1 = arg2 THEN 1 ELSE 0 END))
WHEN 'RBO' THEN state
WHEN 'HALT' THEN state
ELSE
state
END,
/* x */
CASE WHEN opcode = 'OUT' AND NOT painting THEN
x + CASE arg1 WHEN '0' THEN dy ELSE -dy END
ELSE
x
END,
/* y */
CASE WHEN opcode = 'OUT' AND NOT painting THEN
y + CASE arg1 WHEN '0' THEN -dx ELSE dx END
ELSE
y
END,
/* dx */
CASE WHEN opcode = 'OUT' AND NOT painting THEN
CASE arg1
WHEN '0' THEN dy
WHEN '1' THEN -dy
END
ELSE
dx
END,
/* dy */
CASE WHEN opcode = 'OUT' AND NOT painting THEN
CASE arg1
WHEN '0' THEN -dx
WHEN '1' THEN dx
END
ELSE
dy
END,
/* painting */
(opcode = 'OUT') <> painting,
/* field */
CASE WHEN opcode = 'OUT' AND painting THEN
jsonb_set(field, ARRAY[xy], to_jsonb(arg1), true)
ELSE
field
END
FROM machine
/* Convenience functions */
CROSS JOIN LATERAL (VALUES (
/* opcode */
CASE CAST(state->ip AS bigint) % 100
WHEN 1 THEN 'ADD'
WHEN 2 THEN 'MULT'
WHEN 3 THEN 'IN'
WHEN 4 THEN 'OUT'
WHEN 5 THEN 'JIT'
WHEN 6 THEN 'JIF'
WHEN 7 THEN 'LT'
WHEN 8 THEN 'EQ'
WHEN 9 THEN 'RBO'
WHEN 99 THEN 'HALT'
ELSE CAST(CAST(state->ip AS bigint) % 100 AS text)
END,
/* path1 */
CASE CAST(state->ip AS bigint) / 100 % 10
WHEN 0 THEN ARRAY[CAST(state->(ip+1) AS text)]
WHEN 2 THEN ARRAY[CAST(relbase + CAST(state->(ip+1) AS integer) AS text)]
END,
/* path3 */
CASE CAST(state->ip AS bigint) / 10000 % 10
WHEN 0 THEN ARRAY[CAST(state->(ip+3) AS text)]
WHEN 2 THEN ARRAY[CAST(relbase + CAST(state->(ip+3) AS integer) AS text)]
END,
/* arg1 */
coalesce(
CASE CAST(state->ip AS bigint) / 100 % 10
WHEN 0 THEN CAST(state->CAST(state->(ip+1) AS integer) AS bigint)
WHEN 1 THEN CAST(state->(ip+1) AS bigint)
WHEN 2 THEN CAST(state->(relbase::integer + CAST(state->(ip+1) AS integer)) AS bigint)
END,
0),
/* arg2 */
CASE CAST(state->ip AS bigint) / 1000 % 10
WHEN 0 THEN CAST(state->CAST(state->(ip+2) AS integer) AS bigint)
WHEN 1 THEN CAST(state->(ip+2) AS bigint)
WHEN 2 THEN CAST(state->(relbase::integer + CAST(state->(ip+2) AS integer)) AS bigint)
END,
/* xy */
format('%s,%s', x, y)
)) AS v(opcode, path1, path3, arg1, arg2, xy)
/* Extend the state if needed */
CROSS JOIN LATERAL (
WITH RECURSIVE
gen (state, n) AS (
VALUES (state, 0)
UNION ALL
SELECT state || jsonb '0', n+1
FROM gen
WHERE CASE WHEN opcode = 'IN' THEN
jsonb_array_length(state) < CAST(path1[1] AS bigint)
WHEN opcode IN ('ADD', 'MULT', 'LT', 'EQ') THEN
jsonb_array_length(state) < CAST(path3[1] AS bigint)
END)
SELECT state
FROM gen
ORDER BY n DESC
FETCH FIRST ROW ONLY
) AS v2(extended_state)
WHERE state <> '"ERROR"'
AND opcode <> 'HALT'
),
/* Split each 'x,y' field key of the halted machine back into integer coords. */
paint (x, y, value) AS (
SELECT CAST(substring(key FOR position(',' IN key) - 1) AS integer),
CAST(substring(key FROM position(',' IN key) + 1) AS integer),
CAST(value AS integer)
FROM machine
CROSS JOIN LATERAL jsonb_each(field)
WHERE state->ip = '99'
),
/* Dense ranges covering the painted bounding box, so unpainted cells render too. */
xs (x) AS (
SELECT min(x) FROM paint
UNION ALL
SELECT x + 1 FROM xs WHERE x < (SELECT max(x) FROM paint)
),
ys (y) AS (
SELECT min(y) FROM paint
UNION ALL
SELECT y + 1 FROM ys WHERE y < (SELECT max(y) FROM paint)
)
/* One output row per y: '#' for white cells, space otherwise -> registration code. */
SELECT string_agg(CASE WHEN p.value = 1 THEN '#' ELSE ' ' END, '' ORDER BY xs.x) AS second_star
FROM xs CROSS JOIN ys
LEFT JOIN paint AS p ON (p.x, p.y) = (xs.x, ys.y)
GROUP BY ys.y
ORDER BY ys.y
;
-- Session setup for restoring this MySQL dump.
SET NAMES utf8mb4;
-- Disable FK checks so tables can be dropped/recreated in any order;
-- re-enabled at the end of the dump.
SET FOREIGN_KEY_CHECKS = 0;
-- ----------------------------
-- Table structure for article
-- ----------------------------
-- NOTE(review): table COMMENT reads like an author table but the rows look like
-- article content -- confirm the comment against the application.
DROP TABLE IF EXISTS `article`;
CREATE TABLE `article` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`content` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT '' COMMENT '文章内容',
`create_time` timestamp(0) NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
`update_time` timestamp(0) NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '更新时间',
`delete_status` varchar(1) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT '1' COMMENT '是否有效 1.有效 2无效',
PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 21 CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '发布号作者表' ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of article
-- ----------------------------
-- Explicit column lists so the inserts keep working if columns are ever
-- added or reordered (same values as the original dump).
INSERT INTO `article` (`id`, `content`, `create_time`, `update_time`, `delete_status`) VALUES (5, '莎士比亚', '2017-10-25 09:08:45', '2017-10-30 17:59:41', '1');
INSERT INTO `article` (`id`, `content`, `create_time`, `update_time`, `delete_status`) VALUES (6, '亚里士多德', '2017-10-26 10:49:28', '2017-11-18 09:54:15', '1');
INSERT INTO `article` (`id`, `content`, `create_time`, `update_time`, `delete_status`) VALUES (10, '亚历山大', '2017-10-26 14:57:45', '2017-11-08 13:28:52', '1');
INSERT INTO `article` (`id`, `content`, `create_time`, `update_time`, `delete_status`) VALUES (11, '李白', '2017-10-26 15:23:42', '2017-10-26 15:23:42', '1');
INSERT INTO `article` (`id`, `content`, `create_time`, `update_time`, `delete_status`) VALUES (19, '文章test写得好', '2017-11-18 13:37:07', '2019-04-26 16:53:59', '1');
INSERT INTO `article` (`id`, `content`, `create_time`, `update_time`, `delete_status`) VALUES (20, 'My Name Is Anay', '2019-04-26 22:27:21', '2019-04-26 22:27:21', '1');
-- ----------------------------
-- Table structure for qrtz_cron_triggers
-- ----------------------------
-- NOTE(review): looks like the standard Quartz scheduler schema; the mixed
-- latin1/utf8 per-column charsets (TRIGGER_NAME is utf8, the rest latin1)
-- look accidental -- confirm before normalizing.
DROP TABLE IF EXISTS `qrtz_cron_triggers`;
CREATE TABLE `qrtz_cron_triggers` (
`SCHED_NAME` varchar(120) CHARACTER SET latin1 COLLATE latin1_swedish_ci NOT NULL,
`TRIGGER_NAME` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,
`TRIGGER_GROUP` varchar(200) CHARACTER SET latin1 COLLATE latin1_swedish_ci NOT NULL,
`CRON_EXPRESSION` varchar(200) CHARACTER SET latin1 COLLATE latin1_swedish_ci NOT NULL,
`TIME_ZONE_ID` varchar(80) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
PRIMARY KEY (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) USING BTREE
) ENGINE = InnoDB CHARACTER SET = latin1 COLLATE = latin1_swedish_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Table structure for qrtz_job_details
-- ----------------------------
-- Quartz job store: one row per registered job; JOB_DATA is the serialized JobDataMap.
DROP TABLE IF EXISTS `qrtz_job_details`;
CREATE TABLE `qrtz_job_details` (
`SCHED_NAME` varchar(120) CHARACTER SET latin1 COLLATE latin1_swedish_ci NOT NULL,
`JOB_NAME` varchar(200) CHARACTER SET latin1 COLLATE latin1_swedish_ci NOT NULL,
`JOB_GROUP` varchar(200) CHARACTER SET latin1 COLLATE latin1_swedish_ci NOT NULL,
`DESCRIPTION` varchar(250) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
`JOB_CLASS_NAME` varchar(250) CHARACTER SET latin1 COLLATE latin1_swedish_ci NOT NULL,
`IS_DURABLE` varchar(1) CHARACTER SET latin1 COLLATE latin1_swedish_ci NOT NULL,
`IS_NONCONCURRENT` varchar(1) CHARACTER SET latin1 COLLATE latin1_swedish_ci NOT NULL,
`IS_UPDATE_DATA` varchar(1) CHARACTER SET latin1 COLLATE latin1_swedish_ci NOT NULL,
`REQUESTS_RECOVERY` varchar(1) CHARACTER SET latin1 COLLATE latin1_swedish_ci NOT NULL,
`JOB_DATA` blob NULL,
PRIMARY KEY (`SCHED_NAME`, `JOB_NAME`, `JOB_GROUP`) USING BTREE
) ENGINE = InnoDB CHARACTER SET = latin1 COLLATE = latin1_swedish_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Table structure for qrtz_triggers
-- ----------------------------
-- Quartz trigger store; fire times are epoch milliseconds stored as bigint.
DROP TABLE IF EXISTS `qrtz_triggers`;
CREATE TABLE `qrtz_triggers` (
`SCHED_NAME` varchar(120) CHARACTER SET latin1 COLLATE latin1_swedish_ci NOT NULL,
`TRIGGER_NAME` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL,
`TRIGGER_GROUP` varchar(200) CHARACTER SET latin1 COLLATE latin1_swedish_ci NOT NULL,
`JOB_NAME` varchar(200) CHARACTER SET latin1 COLLATE latin1_swedish_ci NOT NULL,
`JOB_GROUP` varchar(200) CHARACTER SET latin1 COLLATE latin1_swedish_ci NOT NULL,
`DESCRIPTION` varchar(250) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
`NEXT_FIRE_TIME` bigint(13) NULL DEFAULT NULL,
`PREV_FIRE_TIME` bigint(13) NULL DEFAULT NULL,
`PRIORITY` int(11) NULL DEFAULT NULL,
`TRIGGER_STATE` varchar(16) CHARACTER SET latin1 COLLATE latin1_swedish_ci NOT NULL,
`TRIGGER_TYPE` varchar(8) CHARACTER SET latin1 COLLATE latin1_swedish_ci NOT NULL,
`START_TIME` bigint(13) NOT NULL,
`END_TIME` bigint(13) NULL DEFAULT NULL,
`CALENDAR_NAME` varchar(200) CHARACTER SET latin1 COLLATE latin1_swedish_ci NULL DEFAULT NULL,
`MISFIRE_INSTR` smallint(2) NULL DEFAULT NULL,
`JOB_DATA` blob NULL,
PRIMARY KEY (`SCHED_NAME`, `TRIGGER_NAME`, `TRIGGER_GROUP`) USING BTREE,
INDEX `SCHED_NAME`(`SCHED_NAME`, `JOB_NAME`, `JOB_GROUP`) USING BTREE
) ENGINE = InnoDB CHARACTER SET = latin1 COLLATE = latin1_swedish_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Table structure for sys_permission
-- ----------------------------
-- Back-office permission catalog. Ids are hand-assigned for display ordering
-- (101.. = article menu, 601.. = user menu, 701.. = role menu).
-- Per the column comment, required_permission uses 1 = required, 2 = optional
-- (not a 0/1 boolean despite tinyint(1)).
DROP TABLE IF EXISTS `sys_permission`;
CREATE TABLE `sys_permission` (
`id` int(11) NOT NULL DEFAULT 0 COMMENT '自定id,主要供前端展示权限列表分类排序使用.',
`menu_code` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT '' COMMENT '归属菜单,前端判断并展示菜单使用,',
`menu_name` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT '' COMMENT '菜单的中文释义',
`permission_code` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT '' COMMENT '权限的代码/通配符,对应代码中@RequiresPermissions 的value',
`permission_name` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT '' COMMENT '本权限的中文释义',
`required_permission` tinyint(1) NULL DEFAULT 2 COMMENT '是否本菜单必选权限, 1.必选 2非必选 通常是\"列表\"权限是必选',
PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '后台权限表' ROW_FORMAT = Compact;
-- ----------------------------
-- Records of sys_permission
-- ----------------------------
INSERT INTO `sys_permission` VALUES (101, 'article', '文章管理', 'article:list', '列表', 1);
INSERT INTO `sys_permission` VALUES (102, 'article', '文章管理', 'article:add', '新增', 2);
INSERT INTO `sys_permission` VALUES (103, 'article', '文章管理', 'article:update', '修改', 2);
INSERT INTO `sys_permission` VALUES (601, 'user', '用户', 'user:list', '列表', 1);
INSERT INTO `sys_permission` VALUES (602, 'user', '用户', 'user:add', '新增', 2);
INSERT INTO `sys_permission` VALUES (603, 'user', '用户', 'user:update', '修改', 2);
INSERT INTO `sys_permission` VALUES (701, 'role', '角色权限', 'role:list', '列表', 1);
INSERT INTO `sys_permission` VALUES (702, 'role', '角色权限', 'role:add', '新增', 2);
INSERT INTO `sys_permission` VALUES (703, 'role', '角色权限', 'role:update', '修改', 2);
INSERT INTO `sys_permission` VALUES (704, 'role', '角色权限', 'role:delete', '删除', 2);
-- ----------------------------
-- Table structure for sys_role
-- ----------------------------
-- Back-office roles; delete_status is a soft-delete flag ('1' active, '2' deleted).
DROP TABLE IF EXISTS `sys_role`;
CREATE TABLE `sys_role` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`role_name` varchar(20) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '角色名',
`create_time` timestamp(0) NULL DEFAULT CURRENT_TIMESTAMP,
`update_time` timestamp(0) NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP(0),
`delete_status` varchar(1) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT '1' COMMENT '是否有效 1有效 2无效',
PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 5 CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '后台角色表' ROW_FORMAT = Compact;
-- ----------------------------
-- Records of sys_role
-- ----------------------------
INSERT INTO `sys_role` VALUES (1, '管理员', '2017-11-22 16:24:34', '2017-11-22 16:24:52', '1');
INSERT INTO `sys_role` VALUES (2, '作家', '2017-11-22 16:24:34', '2017-11-22 16:24:52', '1');
INSERT INTO `sys_role` VALUES (3, '程序员', '2017-11-22 16:28:47', '2017-11-22 16:28:47', '1');
INSERT INTO `sys_role` VALUES (4, '业务员', '2019-04-26 16:52:25', '2019-04-26 16:52:25', '1');
-- ----------------------------
-- Table structure for sys_role_permission
-- ----------------------------
-- Role <-> permission junction table (role_id -> sys_role.id,
-- permission_id -> sys_permission.id); no FK constraints are declared.
DROP TABLE IF EXISTS `sys_role_permission`;
CREATE TABLE `sys_role_permission` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`role_id` int(11) NULL DEFAULT NULL COMMENT '角色id',
`permission_id` int(11) NULL DEFAULT NULL COMMENT '权限id',
`create_time` timestamp(0) NULL DEFAULT CURRENT_TIMESTAMP,
`update_time` timestamp(0) NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP(0),
`delete_status` varchar(1) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT '1' COMMENT '是否有效 1有效 2无效',
PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 24 CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '角色-权限关联表' ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of sys_role_permission
-- ----------------------------
INSERT INTO `sys_role_permission` VALUES (1, 2, 101, '2017-11-22 16:26:21', '2017-11-22 16:26:32', '1');
INSERT INTO `sys_role_permission` VALUES (2, 2, 102, '2017-11-22 16:26:21', '2017-11-22 16:26:32', '1');
INSERT INTO `sys_role_permission` VALUES (5, 2, 602, '2017-11-22 16:28:28', '2017-11-22 16:28:28', '1');
INSERT INTO `sys_role_permission` VALUES (6, 2, 601, '2017-11-22 16:28:28', '2017-11-22 16:28:28', '1');
INSERT INTO `sys_role_permission` VALUES (7, 2, 603, '2017-11-22 16:28:28', '2017-11-22 16:28:28', '1');
INSERT INTO `sys_role_permission` VALUES (8, 2, 703, '2017-11-22 16:28:28', '2017-11-22 16:28:28', '1');
INSERT INTO `sys_role_permission` VALUES (9, 2, 701, '2017-11-22 16:28:28', '2017-11-22 16:28:28', '1');
INSERT INTO `sys_role_permission` VALUES (10, 2, 702, '2017-11-22 16:28:28', '2017-11-22 16:28:28', '1');
INSERT INTO `sys_role_permission` VALUES (11, 2, 704, '2017-11-22 16:28:31', '2017-11-22 16:28:31', '1');
INSERT INTO `sys_role_permission` VALUES (12, 2, 103, '2017-11-22 16:28:31', '2017-11-22 16:28:31', '1');
INSERT INTO `sys_role_permission` VALUES (13, 3, 601, '2017-11-22 16:28:47', '2017-11-22 16:28:47', '1');
INSERT INTO `sys_role_permission` VALUES (14, 3, 701, '2017-11-22 16:28:47', '2017-11-22 16:28:47', '1');
INSERT INTO `sys_role_permission` VALUES (15, 3, 702, '2017-11-22 16:35:01', '2017-11-22 16:35:01', '1');
INSERT INTO `sys_role_permission` VALUES (16, 3, 704, '2017-11-22 16:35:01', '2017-11-22 16:35:01', '1');
INSERT INTO `sys_role_permission` VALUES (17, 3, 102, '2017-11-22 16:35:01', '2017-11-22 16:35:01', '1');
INSERT INTO `sys_role_permission` VALUES (18, 3, 101, '2017-11-22 16:35:01', '2017-11-22 16:35:01', '1');
INSERT INTO `sys_role_permission` VALUES (19, 3, 603, '2017-11-22 16:35:01', '2017-11-22 16:35:01', '1');
INSERT INTO `sys_role_permission` VALUES (20, 4, 101, '2019-04-26 16:52:25', '2019-04-26 16:52:25', '1');
INSERT INTO `sys_role_permission` VALUES (21, 4, 102, '2019-04-26 16:52:25', '2019-04-26 16:52:25', '1');
INSERT INTO `sys_role_permission` VALUES (22, 4, 601, '2019-04-26 16:52:25', '2019-04-26 16:52:25', '1');
INSERT INTO `sys_role_permission` VALUES (23, 4, 602, '2019-04-26 16:52:25', '2019-04-26 16:52:25', '1');
-- ----------------------------
-- Table structure for sys_role_user
-- ----------------------------
-- User <-> role junction table.
-- NOTE(review): mixed-case column names (Sys_User_id/Sys_Role_id) break the
-- snake_case convention of the rest of the schema; kept as-is because the
-- application binds to them.
DROP TABLE IF EXISTS `sys_role_user`;
CREATE TABLE `sys_role_user` (
`id` int(4) NOT NULL AUTO_INCREMENT,
`Sys_User_id` int(4) NULL DEFAULT NULL,
`Sys_Role_id` int(4) NULL DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 3 CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of sys_role_user
-- ----------------------------
-- Explicit column lists so the inserts survive future column additions/reordering.
INSERT INTO `sys_role_user` (`id`, `Sys_User_id`, `Sys_Role_id`) VALUES (1, 1, 1);
INSERT INTO `sys_role_user` (`id`, `Sys_User_id`, `Sys_Role_id`) VALUES (2, 2, 2);
-- ----------------------------
-- Table structure for sys_user
-- ----------------------------
-- Back-office user accounts.
-- SECURITY NOTE(review): passwords are stored in plaintext in the seed data
-- below ('123456'); they should be salted hashes -- confirm how the
-- application authenticates before relying on this dump anywhere real.
DROP TABLE IF EXISTS `sys_user`;
CREATE TABLE `sys_user` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`username` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '用户名',
`password` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '密码',
`nickname` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '昵称',
`role_id` int(11) NULL DEFAULT 0 COMMENT '角色ID',
`create_time` timestamp(0) NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
`update_time` timestamp(0) NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '修改时间',
`delete_status` varchar(1) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT '1' COMMENT '是否有效 1有效 2无效',
PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 10008 CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '运营后台用户表' ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of sys_user
-- ----------------------------
INSERT INTO `sys_user` VALUES (10003, 'admin', '123456', '超级用户', 1, '2017-10-30 11:52:38', '2019-04-27 17:02:37', '1');
INSERT INTO `sys_user` VALUES (10004, 'user', '123456', '莎士比亚', 2, '2017-10-30 16:13:02', '2017-11-18 02:48:24', '1');
INSERT INTO `sys_user` VALUES (10005, 'aaa', '123456', '总经理', 1, '2017-11-15 14:02:56', '2019-04-27 00:14:35', '1');
INSERT INTO `sys_user` VALUES (10007, 'test', '123456', '业务员小刘', 4, '2017-11-22 16:29:41', '2019-04-26 16:52:57', '1');
-- ----------------------------
-- Table structure for t1
-- ----------------------------
-- Scratch/sample table (paired with t2 below, presumably for join exercises -- verify).
DROP TABLE IF EXISTS `t1`;
CREATE TABLE `t1` (
`C1` int(11) NOT NULL,
`C2` varchar(45) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
PRIMARY KEY (`C1`) USING BTREE
) ENGINE = InnoDB CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of t1
-- ----------------------------
INSERT INTO `t1` VALUES (1, '1');
INSERT INTO `t1` VALUES (2, '2');
INSERT INTO `t1` VALUES (3, '3');
-- ----------------------------
-- Table structure for t2
-- ----------------------------
-- Scratch/sample table, same shape as t1.
DROP TABLE IF EXISTS `t2`;
CREATE TABLE `t2` (
`C1` int(11) NOT NULL,
`C2` varchar(45) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
PRIMARY KEY (`C1`) USING BTREE
) ENGINE = InnoDB CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of t2
-- ----------------------------
INSERT INTO `t2` VALUES (1, '1');
INSERT INTO `t2` VALUES (2, '1');
INSERT INTO `t2` VALUES (3, '2');
INSERT INTO `t2` VALUES (4, '4');
-- ----------------------------
-- Table structure for t_actor
-- ----------------------------
-- Participants (WeChat users). Ids in the seed data are 19-digit values that
-- look snowflake-generated -- bigint PK accommodates them.
-- (org_id, openid) is unique: one WeChat identity per organization.
DROP TABLE IF EXISTS `t_actor`;
CREATE TABLE `t_actor` (
`Id` bigint(20) NOT NULL,
`openid` varchar(45) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL COMMENT '微信的openid',
`mobile` varchar(11) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL COMMENT '手机号码',
`org_id` varchar(20) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL COMMENT '机构ID',
`name` varchar(256) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL COMMENT '姓名',
`nick_name` varchar(256) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`gender` varchar(2) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL COMMENT '性别:0未知,1:男,2:女',
`head_img` varchar(256) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL COMMENT '用户头像uri',
`wx_body` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL COMMENT '微信信息使用标准json格式存储',
`employee_no` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL COMMENT '雇员号码',
`email` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL COMMENT '邮箱',
`status` int(2) NULL DEFAULT NULL COMMENT '参与者状态0:未参与,1:手机未认证,2:正常',
`create_time` datetime(0) NULL DEFAULT NULL COMMENT '创建时间',
`update_time` datetime(0) NULL DEFAULT NULL,
PRIMARY KEY (`Id`) USING BTREE,
UNIQUE INDEX `org_id`(`org_id`, `openid`) USING BTREE
) ENGINE = InnoDB CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci COMMENT = '参与者' ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of t_actor
-- ----------------------------
INSERT INTO `t_actor` VALUES (1137363323835183106, NULL, '15501033589', NULL, 'h3', 'master', NULL, NULL, NULL, NULL, NULL, NULL, '2019-06-08 22:18:42', NULL);
INSERT INTO `t_actor` VALUES (1137401001620303873, NULL, NULL, NULL, 'h4', 'master', NULL, NULL, NULL, NULL, NULL, NULL, '2019-06-09 00:48:25', NULL);
INSERT INTO `t_actor` VALUES (1137402390442418177, NULL, NULL, NULL, 'h6', 'master', NULL, NULL, NULL, NULL, NULL, NULL, '2019-06-09 00:53:56', NULL);
INSERT INTO `t_actor` VALUES (1137402741056901122, NULL, NULL, NULL, 'h7', 'master', NULL, NULL, NULL, NULL, NULL, NULL, '2019-06-09 00:55:20', NULL);
INSERT INTO `t_actor` VALUES (1137402808778133506, NULL, NULL, NULL, 'h9', 'master', NULL, NULL, NULL, NULL, NULL, NULL, '2019-06-09 00:55:36', NULL);
INSERT INTO `t_actor` VALUES (1137404493177065473, NULL, NULL, NULL, 'h11', 'master', NULL, NULL, NULL, NULL, NULL, NULL, '2019-06-09 01:02:18', NULL);
-- ----------------------------
-- Table structure for t_awards
-- ----------------------------
-- Lottery award definitions per organization.
-- NOTE(review): `total` and `sent_num` are counters stored as varchar, and
-- refuse_uid/keep_uid are comma-separated id lists in one column -- numeric
-- types / a junction table would be safer; confirm app expectations first.
DROP TABLE IF EXISTS `t_awards`;
CREATE TABLE `t_awards` (
`Id` varchar(32) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL DEFAULT '',
`org_id` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '机构ID',
`name` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL DEFAULT '' COMMENT '奖品名称',
`level` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '奖品等级',
`total` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL DEFAULT '' COMMENT '奖品总数',
`sent_num` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL DEFAULT '' COMMENT '派送出去的数量',
`status` int(11) NULL DEFAULT NULL COMMENT '奖品发送状态:0,未开始,1,已开始,2已结束',
`create_time` datetime(0) NULL DEFAULT NULL COMMENT '创建时间',
`update_time` datetime(0) NULL DEFAULT NULL COMMENT '更新时间',
`refuse_uid` varchar(1024) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '拒绝用户列表',
`keep_uid` varchar(1024) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '保留中奖用户',
PRIMARY KEY (`Id`) USING BTREE,
UNIQUE INDEX `org_id`(`org_id`, `name`) USING BTREE
) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '奖项表' ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of t_awards
-- ----------------------------
INSERT INTO `t_awards` VALUES ('2e430c6ac46d4e16959e87d6c90b91d0', '1', '三等奖', '1', '10', '0', 0, '2017-11-29 16:29:54', NULL, '166b722305a64445a56d2c0fb16e85e1,2be0bc02c197463aa2be2e0342a1aec4,a4d81f6ae8004d6d9e5cd11bca0b8197', '85ca5cffed8045bb802484d3378b3242,a7f74285f738451b979f7240a46eeb53');
INSERT INTO `t_awards` VALUES ('34961f98f260434e8772f8b88bdb4c6e', '0', '三等奖', '1', '10', '0', 0, '2017-11-28 14:34:47', NULL, NULL, NULL);
INSERT INTO `t_awards` VALUES ('540e81abafa248e6bea644cffda97c0e', '0', '一等奖', '1', '10', '0', 0, '2017-11-28 14:34:22', NULL, NULL, NULL);
INSERT INTO `t_awards` VALUES ('b19d908b5a8e44ae9390a3919d51c71e', '0', '特等奖', '1', '10', '0', 0, '2017-11-28 14:35:06', NULL, NULL, NULL);
INSERT INTO `t_awards` VALUES ('b9fb86c07a7942dfa346c6f00e3393fd', '0', '感恩节大回馈', '1', '10', '0', 0, '2017-11-28 14:33:16', NULL, NULL, NULL);
INSERT INTO `t_awards` VALUES ('ebf1dfe36a7244b6888cde938239e5f4', '0', '二等奖', '1', '10', '0', 0, '2017-11-28 14:34:37', NULL, NULL, NULL);
-- ----------------------------
-- Table structure for t_org
-- ----------------------------
-- Organizations / WeChat official-account configuration.
-- SECURITY NOTE(review): the seed rows below contain what look like real
-- appid/appsecret credentials in plaintext -- rotate and externalize these
-- before using this dump outside a throwaway environment.
DROP TABLE IF EXISTS `t_org`;
CREATE TABLE `t_org` (
`id` int(4) NOT NULL,
`org_name` varchar(45) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
`charge_name` varchar(45) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '负责人名称',
`charge_mobile` varchar(11) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL COMMENT '机构联系电话',
`appid` varchar(45) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
`appsecret` varchar(45) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
`token` varchar(45) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '公众号介入token',
`access_token` varchar(128) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '公众号访问token',
`token_fresh_time` datetime(0) NULL DEFAULT NULL,
`create_time` datetime(0) NULL DEFAULT NULL,
`status` int(11) NULL DEFAULT NULL,
`backUrl` varchar(128) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '授权后跳转的页面',
PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '组织机构表' ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of t_org
-- ----------------------------
INSERT INTO `t_org` VALUES (0, NULL, '马化腾', '15501033589', 'wx3cb81c3c95c1a755', '600bd6c8edb8ca5340e409910845ca5c', NULL, NULL, NULL, NULL, NULL, NULL);
INSERT INTO `t_org` VALUES (1, NULL, '马化腾', '15501033589', 'wx3cb81c3c95c1a755', '600bd6c8edb8ca5340e409910845ca5c', '71D8AC937E27443A', NULL, NULL, NULL, 0, 'https://www.baidu.com/');
INSERT INTO `t_org` VALUES (2, '阿里巴巴', '马化腾', '15501033589', 'wx3cb81c3c95c1a755', '600bd6c8edb8ca5340e409910845ca5c', 'F98663360852E242', NULL, NULL, NULL, 0, NULL);
-- ----------------------------
-- Table structure for t_owner
-- ----------------------------
-- Property owners; (room, build) is unique, i.e. one owner row per apartment.
-- NOTE(review): electfee/waterfee are money amounts stored as varchar --
-- DECIMAL would be safer; confirm how the app reads them before changing.
DROP TABLE IF EXISTS `t_owner`;
CREATE TABLE `t_owner` (
`id` varchar(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL,
`name` varchar(45) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`mobile` varchar(11) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`build` varchar(2) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL COMMENT '楼号',
`unit` varchar(2) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL COMMENT '單元',
`floor` varchar(2) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL COMMENT ' 楼层',
`room` varchar(4) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL COMMENT '房间号',
`electfee` varchar(10) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`waterfee` varchar(10) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`createtime` datetime(0) NULL DEFAULT NULL,
`updatetime` datetime(0) NULL DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE,
UNIQUE INDEX `idx_room`(`room`, `build`) USING BTREE
) ENGINE = InnoDB CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci COMMENT = ' 业主表' ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of t_owner
-- ----------------------------
INSERT INTO `t_owner` VALUES ('3528017ef4484ddc99a1c69cf67a7253', '冯鑫亮', NULL, '6', '1', '4', '414', '0.00', '0.00', '2018-01-28 17:10:20', '2018-01-28 17:10:20');
INSERT INTO `t_owner` VALUES ('415b17cf8f0b4ac98d6cd165bd7f4540', '李狗蛋', NULL, '6', '1', '5', '523', '0.00', '0.00', '2018-01-28 17:05:37', '2018-01-28 17:05:37');
INSERT INTO `t_owner` VALUES ('63f93407be6d47278f3182333b1c52ef', '贾业增', NULL, '6', '1', '4', '416', '0.00', '0.00', '2018-01-28 17:08:30', '2018-01-28 17:08:30');
INSERT INTO `t_owner` VALUES ('9460d2d2926b42b99ad7c09efdd804b6', '亲亲我', NULL, '6', '1', '1', '113', '0.00', '0.00', '2018-01-28 17:12:46', '2018-01-28 17:12:46');
INSERT INTO `t_owner` VALUES ('b24e3551cafb4f65be5767ace0f1a82e', '王利刚', NULL, '6', '1', '1', '115', '66.45', '23.88', '2018-01-28 13:29:40', '2018-01-28 18:04:33');
-- ----------------------------
-- Table structure for t_prize
-- ----------------------------
-- Individual prize wins: one row per (winner uid, award), with a
-- nanosecond-resolution reward_index recording draw order.
DROP TABLE IF EXISTS `t_prize`;
CREATE TABLE `t_prize` (
`Id` varchar(32) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL DEFAULT '',
`uid` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL DEFAULT '' COMMENT '中奖用户表',
`award_id` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL DEFAULT '' COMMENT '奖项ID',
`create_time` datetime(0) NULL DEFAULT NULL COMMENT '中奖时间',
`reward_index` bigint(20) NOT NULL DEFAULT 0 COMMENT '中奖的序列,纳秒级别',
`receive_time` timestamp(0) NULL DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP(0) COMMENT '领取时间',
`status` varchar(2) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL DEFAULT '0' COMMENT '奖品状态:0可领取,1暂时不可领取,2已经作废,3已领取',
PRIMARY KEY (`Id`) USING BTREE,
INDEX `uid`(`uid`, `award_id`) USING BTREE
) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '奖品表' ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of t_prize
-- ----------------------------
INSERT INTO `t_prize` VALUES ('49f6ba5a5c434066b8db7b583511cd4f', '810e29852abc4360a68628a1cad17068', '34961f98f260434e8772f8b88bdb4c6e', '2017-11-28 18:13:07', 1067541279477033, NULL, '0');
INSERT INTO `t_prize` VALUES ('8d3803bc160248b9a0cb60358add3e52', '85ca5cffed8045bb802484d3378b3242', '34961f98f260434e8772f8b88bdb4c6e', '2017-11-28 18:13:07', 1067541279495215, NULL, '0');
INSERT INTO `t_prize` VALUES ('ca395050c5cd4d4d8e770e2e94e376b7', '7a85550705714a38bc5b4d2a55a39a71', '34961f98f260434e8772f8b88bdb4c6e', '2017-11-28 18:13:07', 1067541279287596, NULL, '0');
INSERT INTO `t_prize` VALUES ('cb8c69e64b3842f585f621ad322c1a7b', '', '540e81abafa248e6bea644cffda97c0e', '2017-11-28 17:53:11', 1066345721510330, NULL, '0');
-- ----------------------------
-- Table structure for user
-- ----------------------------
-- NOTE(review): `user` is a reserved word in several engines, so the backticks
-- are mandatory everywhere this table is referenced; also the only gb2312
-- table in an otherwise utf8/utf8mb4 dump -- likely unintentional.
DROP TABLE IF EXISTS `user`;
CREATE TABLE `user` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`name` varchar(10) CHARACTER SET gb2312 COLLATE gb2312_chinese_ci NULL DEFAULT NULL,
`password` varchar(10) CHARACTER SET gb2312 COLLATE gb2312_chinese_ci NULL DEFAULT NULL,
`transactor` varchar(10) CHARACTER SET gb2312 COLLATE gb2312_chinese_ci NULL DEFAULT NULL,
`update_time` date NULL DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 17 CHARACTER SET = gb2312 COLLATE = gb2312_chinese_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of user
-- ----------------------------
INSERT INTO `user` VALUES (1, 'A', 's', NULL, '2019-05-10');
INSERT INTO `user` VALUES (2, '李四', 's', NULL, NULL);
INSERT INTO `user` VALUES (11, '哈哈', '1234abc', NULL, NULL);
INSERT INTO `user` VALUES (12, '大山', NULL, NULL, NULL);
INSERT INTO `user` VALUES (13, '大山', NULL, NULL, NULL);
INSERT INTO `user` VALUES (14, '大山', NULL, NULL, NULL);
INSERT INTO `user` VALUES (15, 'B', NULL, NULL, NULL);
INSERT INTO `user` VALUES (16, 'A', NULL, NULL, NULL);
SET FOREIGN_KEY_CHECKS = 1;
/* Session options for the BugNet (SQL Server) schema upgrade below.
   XACT_ABORT ON makes any runtime error roll back the transaction opened
   here; QUOTED_IDENTIFIER and ANSI_NULLS must be ON for the ALTER/CREATE
   PROCEDURE batches that follow. */
SET NOEXEC OFF
SET ANSI_WARNINGS ON
SET XACT_ABORT ON
SET IMPLICIT_TRANSACTIONS OFF
SET ARITHABORT ON
SET QUOTED_IDENTIFIER ON
SET NUMERIC_ROUNDABORT OFF
SET CONCAT_NULL_YIELDS_NULL ON
SET ANSI_NULLS ON
SET ANSI_PADDING ON
GO
/* All schema changes run in one transaction. NOTE(review): no COMMIT is
   visible in this chunk -- confirm it appears later in the script. */
BEGIN TRAN
GO
/* Create new category disabled column */
/* Soft-delete support: categories are flagged Disabled=1 instead of deleted.
   NOTE(review): the DEFAULT constraint is auto-named; giving it an explicit
   name would make a future drop/change easier. */
ALTER TABLE [dbo].[BugNet_ProjectCategories] ADD [Disabled] [bit] DEFAULT 0 NOT NULL
GO
/* Relax the issue lookup columns so status/priority/type may be unset (NULL). */
ALTER TABLE [dbo].[BugNet_Issues] ALTER COLUMN [IssueStatusId] [int] NULL
GO
ALTER TABLE [dbo].[BugNet_Issues] ALTER COLUMN [IssuePriorityId] [int] NULL
GO
ALTER TABLE [dbo].[BugNet_Issues] ALTER COLUMN [IssueTypeId] [int] NULL
GO
/****** Object:  StoredProcedure [dbo].[BugNet_ProjectCategories_DeleteCategory] Script Date: 02/16/2011 13:01:23 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
-- Soft-deletes a category: flags the row Disabled rather than removing it,
-- so historical issues keep a valid category reference.
ALTER PROCEDURE [dbo].[BugNet_ProjectCategories_DeleteCategory]
    @CategoryId INT
AS
    UPDATE BugNet_ProjectCategories
    SET [Disabled] = 1
    WHERE CategoryId = @CategoryId
GO
/****** Object: StoredProcedure [dbo].[BugNet_ProjectCategories_GetCategoriesByProjectId] Script Date: 02/16/2011 13:01:44 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
-- Returns all enabled categories of a project, with a correlated child count so
-- the UI can render expanders without a second round trip.
ALTER PROCEDURE [dbo].[BugNet_ProjectCategories_GetCategoriesByProjectId]
@ProjectId int
AS
SELECT
CategoryId,
ProjectId,
CategoryName,
ParentCategoryId,
-- ChildCount counts children regardless of their Disabled flag.
(SELECT COUNT(*) FROM BugNet_ProjectCategories WHERE ParentCategoryId=c.CategoryId) ChildCount,
Disabled
FROM BugNet_ProjectCategories c
WHERE
ProjectId = @ProjectId AND [Disabled] = 0
ORDER BY CategoryName
GO
/****** Object: StoredProcedure [dbo].[BugNet_ProjectCategories_GetChildCategoriesByCategoryId] Script Date: 02/16/2011 13:02:39 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
-- Returns the enabled direct children of one category (one tree level), again
-- with a correlated grandchild count for the UI.
ALTER PROCEDURE [dbo].[BugNet_ProjectCategories_GetChildCategoriesByCategoryId]
@CategoryId int
AS
SELECT
CategoryId,
ProjectId,
CategoryName,
ParentCategoryId,
(SELECT COUNT(*) FROM BugNet_ProjectCategories WHERE ParentCategoryId = c.CategoryId) ChildCount
FROM BugNet_ProjectCategories c
WHERE
c.ParentCategoryId = @CategoryId AND [Disabled] = 0
ORDER BY CategoryName
GO
/****** Object: StoredProcedure [dbo].[BugNet_ProjectCategories_GetRootCategoriesByProjectId] Script Date: 02/16/2011 13:03:49 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
-- Returns the enabled top-level categories of a project. Root categories are
-- identified by ParentCategoryId = 0 (sentinel, not NULL).
ALTER PROCEDURE [dbo].[BugNet_ProjectCategories_GetRootCategoriesByProjectId]
@ProjectId int
AS
SELECT
CategoryId,
ProjectId,
CategoryName,
ParentCategoryId,
(SELECT COUNT(*) FROM BugNet_ProjectCategories WHERE ParentCategoryId=c.CategoryId) ChildCount
FROM BugNet_ProjectCategories c
WHERE
ProjectId = @ProjectId AND c.ParentCategoryId = 0 AND [Disabled] = 0
ORDER BY CategoryName
GO
/****** Object: StoredProcedure [dbo].[BugNet_ProjectMailbox_CreateProjectMailbox] Script Date: 02/16/2011 12:44:36 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
-- Creates a POP3 mailbox -> project mapping; issues arriving on @MailBox get the
-- given type and default assignee.
-- NOTE(review): if @AssignToUserName matches no aspnet_users row, @AssignToUserId
-- stays NULL and a NULL assignee is inserted silently -- confirm callers validate
-- the user name first.
-- NOTE(review): RETURN converts scope_identity() (numeric) to int; fine while
-- identity values fit in int.
CREATE PROCEDURE [dbo].[BugNet_ProjectMailbox_CreateProjectMailbox]
@MailBox nvarchar (100),
@ProjectId int,
@AssignToUserName nvarchar(255),
@IssueTypeID int
AS
DECLARE @AssignToUserId UNIQUEIDENTIFIER
SELECT @AssignToUserId = UserId FROM aspnet_users WHERE Username = @AssignToUserName
INSERT BugNet_ProjectMailBoxes
(
MailBox,
ProjectId,
AssignToUserId,
IssueTypeID
)
VALUES
(
@MailBox,
@ProjectId,
@AssignToUserId,
@IssueTypeId
)
RETURN scope_identity()
GO
/****** Object: StoredProcedure [dbo].[BugNet_ProjectMailbox_DeleteProjectMailbox] Script Date: 02/16/2011 12:44:55 ******/
-- Consistency fix: every other procedure in this script is created with
-- ANSI_NULLS ON and QUOTED_IDENTIFIER ON; these two settings are captured at
-- CREATE time and persisted with the procedure, so the OFF/OFF here was a silent
-- outlier. The body contains no NULL comparisons and no double-quoted
-- identifiers, so normalizing to ON/ON is behavior-preserving.
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
-- Hard-deletes a mailbox mapping. Returns 0 when a row was deleted, 1 when no
-- row matched @ProjectMailboxId.
CREATE PROCEDURE [dbo].[BugNet_ProjectMailbox_DeleteProjectMailbox]
@ProjectMailboxId int
AS
DELETE
BugNet_ProjectMailBoxes
WHERE
ProjectMailboxId = @ProjectMailboxId
IF @@ROWCOUNT > 0
RETURN 0
ELSE
RETURN 1
GO
/****** Object: StoredProcedure [dbo].[BugNet_ProjectMailbox_GetMailboxByProjectId] Script Date: 02/16/2011 12:45:16 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
-- First version of this procedure; it is dropped and recreated with SET NOCOUNT ON
-- later in this same script.
-- INNER JOINs mean mailboxes with a missing user profile or issue type are omitted.
CREATE PROCEDURE [dbo].[BugNet_ProjectMailbox_GetMailboxByProjectId]
@ProjectId int
AS
SELECT BugNet_ProjectMailboxes.*,
u.Username AssignToUserName,
p.DisplayName AssignToDisplayName,
BugNet_ProjectIssueTypes.IssueTypeName
FROM
BugNet_ProjectMailBoxes
INNER JOIN aspnet_Users u ON u.UserId = AssignToUserId
INNER JOIN BugNet_UserProfiles p ON u.UserName = p.UserName
INNER JOIN BugNet_ProjectIssueTypes ON BugNet_ProjectIssueTypes.IssueTypeId = BugNet_ProjectMailboxes.IssueTypeId
WHERE
BugNet_ProjectMailBoxes.ProjectId = @ProjectId
GO
/****** Object: StoredProcedure [dbo].[BugNet_ProjectMailbox_GetProjectByMailbox] Script Date: 02/16/2011 12:45:59 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
-- First version; dropped and recreated near the end of this script.
-- NOTE(review): @mailbox is nvarchar(256) while CreateProjectMailbox caps
-- MailBox at nvarchar(100) -- the lengths should probably agree (the later
-- recreation uses nvarchar(100)).
CREATE PROCEDURE [dbo].[BugNet_ProjectMailbox_GetProjectByMailbox]
(
@mailbox nvarchar(256)
)
AS
SET NOCOUNT ON
SELECT BugNet_ProjectMailBoxes.MailBox, BugNet_ProjectMailBoxes.ProjectMailboxId, BugNet_ProjectMailBoxes.ProjectId,
BugNet_ProjectMailBoxes.IssueTypeId, Users.UserName AS AssignToName, BugNet_ProjectMailBoxes.AssignToUserId,
BugNet_ProjectIssueTypes.IssueTypeName
FROM BugNet_Projects INNER JOIN
BugNet_ProjectMailBoxes ON BugNet_ProjectMailBoxes.ProjectId = BugNet_Projects.ProjectId INNER JOIN
aspnet_Users AS Users ON BugNet_ProjectMailBoxes.AssignToUserId = Users.UserId INNER JOIN
BugNet_ProjectIssueTypes ON BugNet_Projects.ProjectId = BugNet_ProjectIssueTypes.ProjectId AND
BugNet_ProjectMailBoxes.IssueTypeId = BugNet_ProjectIssueTypes.IssueTypeId
WHERE (BugNet_ProjectMailBoxes.MailBox = @mailbox)
GO
/****** Object: StoredProcedure [dbo].[BugNet_ProjectMailbox_UpdateProjectMailbox] Script Date: 02/16/2011 12:46:18 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
-- Rewrites every mutable field of one mailbox mapping.
-- NOTE(review): as in CreateProjectMailbox, an unknown @AssignToUserName silently
-- NULLs the assignee instead of raising an error.
CREATE PROCEDURE [dbo].[BugNet_ProjectMailbox_UpdateProjectMailbox]
@ProjectMailboxId int,
@MailBoxEmailAddress nvarchar (100),
@ProjectId int,
@AssignToUserName nvarchar(255),
@IssueTypeId int
AS
DECLARE @AssignToUserId UNIQUEIDENTIFIER
SELECT @AssignToUserId = UserId FROM aspnet_users WHERE Username = @AssignToUserName
UPDATE BugNet_ProjectMailBoxes SET
MailBox = @MailBoxEmailAddress,
ProjectId = @ProjectId,
AssignToUserId = @AssignToUserId,
IssueTypeId = @IssueTypeId
WHERE ProjectMailboxId = @ProjectMailboxId
GO
-- Invert the sense of two host settings: Disable* flags become positive
-- UserRegistration / AnonymousAccess flags, reset to "off".
-- NOTE(review): one setting is reset to '0' and the other to 'False' -- the
-- mixed representations look intentional per-consumer, but verify.
UPDATE BugNet_HostSettings SET SettingName = 'UserRegistration' WHERE SettingName = 'DisableUserRegistration'
UPDATE BugNet_HostSettings SET SettingValue = '0' WHERE SettingName ='UserRegistration'
UPDATE BugNet_HostSettings SET SettingName = 'AnonymousAccess' WHERE SettingName = 'DisableAnonymousAccess'
UPDATE BugNet_HostSettings SET SettingValue = 'False' WHERE SettingName ='AnonymousAccess'
GO
-- Localization moves from DB string resources to resource files; only the list
-- of installed cultures remains in the database.
DROP TABLE [dbo].[BugNet_StringResources]
GO
CREATE TABLE [dbo].[BugNet_Languages] (
[LanguageId] int NOT NULL IDENTITY(1, 1),
[CultureCode] nvarchar(50) NOT NULL,
[CultureName] nvarchar(200) NOT NULL,
[FallbackCulture] nvarchar(50) NULL
)
GO
ALTER TABLE [dbo].[BugNet_Languages]
ADD
CONSTRAINT [PK_BugNet_Languages]
PRIMARY KEY
([LanguageId])
GO
-- One row per culture code.
CREATE UNIQUE NONCLUSTERED INDEX IX_BugNet_Languages ON [dbo].[BugNet_Languages] ( CultureCode )
GO
/****** Object: StoredProcedure [dbo].[BugNet_Languages_GetInstalledLanguages] Script Date: 04/15/2011 12:01:09 ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
-- Returns the distinct culture codes registered in BugNet_Languages.
-- (DISTINCT is redundant given the unique index on CultureCode, but harmless.)
CREATE PROCEDURE [dbo].[BugNet_Languages_GetInstalledLanguages]
AS
BEGIN
SET NOCOUNT ON;
SELECT DISTINCT cultureCode FROM BugNet_Languages
END
GO
-- Obsolete now that BugNet_StringResources is gone.
DROP PROCEDURE [dbo].[BugNet_StringResources_GetInstalledLanguageResources]
GO
-- Seed the new localization settings and the default installed language.
INSERT INTO [dbo].[BugNet_HostSettings] ([SettingName], [SettingValue]) VALUES('ApplicationDefaultLanguage','en-US')
INSERT INTO [dbo].[BugNet_HostSettings] ([SettingName], [SettingValue]) VALUES('Pop3ProcessAttachments','False')
INSERT INTO [dbo].[BugNet_Languages] ([CultureCode], [CultureName], [FallbackCulture]) VALUES('en-US', 'English (United States)', 'en')
PRINT N'Updating Permission keys'
-- Re-key every permission by its well-known PermissionId; the IDs 1-32 are the
-- fixed set shipped by the installer.
UPDATE BugNet_Permissions SET PermissionKey = 'CloseIssue' WHERE PermissionId = 1
UPDATE BugNet_Permissions SET PermissionKey = 'AddIssue' WHERE PermissionId = 2
UPDATE BugNet_Permissions SET PermissionKey = 'AssignIssue' WHERE PermissionId = 3
UPDATE BugNet_Permissions SET PermissionKey = 'EditIssue' WHERE PermissionId = 4
UPDATE BugNet_Permissions SET PermissionKey = 'SubscribeIssue' WHERE PermissionId = 5
UPDATE BugNet_Permissions SET PermissionKey = 'DeleteIssue' WHERE PermissionId = 6
UPDATE BugNet_Permissions SET PermissionKey = 'AddComment' WHERE PermissionId = 7
UPDATE BugNet_Permissions SET PermissionKey = 'EditComment' WHERE PermissionId = 8
UPDATE BugNet_Permissions SET PermissionKey = 'DeleteComment' WHERE PermissionId = 9
UPDATE BugNet_Permissions SET PermissionKey = 'AddAttachment' WHERE PermissionId = 10
UPDATE BugNet_Permissions SET PermissionKey = 'DeleteAttachment' WHERE PermissionId = 11
UPDATE BugNet_Permissions SET PermissionKey = 'AddRelated' WHERE PermissionId = 12
UPDATE BugNet_Permissions SET PermissionKey = 'DeleteRelated' WHERE PermissionId = 13
UPDATE BugNet_Permissions SET PermissionKey = 'ReopenIssue' WHERE PermissionId = 14
UPDATE BugNet_Permissions SET PermissionKey = 'OwnerEditComment' WHERE PermissionId = 15
UPDATE BugNet_Permissions SET PermissionKey = 'EditIssueDescription' WHERE PermissionId = 16
UPDATE BugNet_Permissions SET PermissionKey = 'EditIssueTitle' WHERE PermissionId = 17
UPDATE BugNet_Permissions SET PermissionKey = 'AdminEditProject' WHERE PermissionId = 18
UPDATE BugNet_Permissions SET PermissionKey = 'AddTimeEntry' WHERE PermissionId = 19
UPDATE BugNet_Permissions SET PermissionKey = 'DeleteTimeEntry' WHERE PermissionId = 20
UPDATE BugNet_Permissions SET PermissionKey = 'AdminCreateProject' WHERE PermissionId = 21
UPDATE BugNet_Permissions SET PermissionKey = 'AddQuery' WHERE PermissionId = 22
UPDATE BugNet_Permissions SET PermissionKey = 'DeleteQuery' WHERE PermissionId = 23
UPDATE BugNet_Permissions SET PermissionKey = 'AdminCloneProject' WHERE PermissionId = 24
UPDATE BugNet_Permissions SET PermissionKey = 'AddSubIssue' WHERE PermissionId = 25
UPDATE BugNet_Permissions SET PermissionKey = 'DeleteSubIssue' WHERE PermissionId = 26
UPDATE BugNet_Permissions SET PermissionKey = 'AddParentIssue' WHERE PermissionId = 27
UPDATE BugNet_Permissions SET PermissionKey = 'DeleteParentIssue' WHERE PermissionId = 28
UPDATE BugNet_Permissions SET PermissionKey = 'AdminDeleteProject' WHERE PermissionId = 29
UPDATE BugNet_Permissions SET PermissionKey = 'ViewProjectCalendar' WHERE PermissionId = 30
UPDATE BugNet_Permissions SET PermissionKey = 'ChangeIssueStatus' WHERE PermissionId = 31
UPDATE BugNet_Permissions SET PermissionKey = 'EditQuery' WHERE PermissionId = 32
GO
-- Recreate the two issue views (drop-then-create; ALTER VIEW was not used so the
-- script also works when the views do not exist yet).
IF EXISTS (SELECT * FROM sys.views WHERE object_id = OBJECT_ID(N'[BugNet_GetIssuesByProjectIdAndCustomFieldView]'))
DROP VIEW [dbo].[BugNet_GetIssuesByProjectIdAndCustomFieldView]
GO
IF EXISTS (SELECT * FROM sys.views WHERE object_id = OBJECT_ID(N'[BugNet_IssuesView]'))
DROP VIEW [dbo].[BugNet_IssuesView]
GO
-- Denormalized issue view joined with custom field values: one row per
-- (issue, custom field value) pair. Lookup names are LEFT JOINed and defaulted
-- to 'none' / '' so nullable classification columns never surface as NULL.
CREATE VIEW [dbo].[BugNet_GetIssuesByProjectIdAndCustomFieldView]
AS
SELECT
dbo.BugNet_Issues.IssueId,
dbo.BugNet_Issues.Disabled,
dbo.BugNet_Issues.IssueTitle,
dbo.BugNet_Issues.IssueDescription,
dbo.BugNet_Issues.IssueStatusId,
dbo.BugNet_Issues.IssuePriorityId,
dbo.BugNet_Issues.IssueTypeId,
dbo.BugNet_Issues.IssueCategoryId,
dbo.BugNet_Issues.ProjectId,
dbo.BugNet_Issues.IssueResolutionId,
dbo.BugNet_Issues.IssueCreatorUserId,
dbo.BugNet_Issues.IssueAssignedUserId,
dbo.BugNet_Issues.IssueAffectedMilestoneId,
dbo.BugNet_Issues.IssueOwnerUserId,
dbo.BugNet_Issues.IssueDueDate,
dbo.BugNet_Issues.IssueMilestoneId,
dbo.BugNet_Issues.IssueVisibility,
dbo.BugNet_Issues.IssueEstimation,
dbo.BugNet_Issues.DateCreated,
dbo.BugNet_Issues.LastUpdate,
dbo.BugNet_Issues.LastUpdateUserId,
dbo.BugNet_Projects.ProjectName,
dbo.BugNet_Projects.ProjectCode,
ISNULL(dbo.BugNet_ProjectPriorities.PriorityName, N'none') AS PriorityName,
ISNULL(dbo.BugNet_ProjectIssueTypes.IssueTypeName,N'none') AS IssueTypeName,
ISNULL(dbo.BugNet_ProjectCategories.CategoryName, N'none') AS CategoryName,
ISNULL(dbo.BugNet_ProjectStatus.StatusName, N'none') AS StatusName ,
ISNULL(dbo.BugNet_ProjectMilestones.MilestoneName, N'none') AS MilestoneName,
ISNULL(AffectedMilestone.MilestoneName, N'none') AS AffectedMilestoneName,
ISNULL(dbo.BugNet_ProjectResolutions.ResolutionName, 'none') AS ResolutionName,
LastUpdateUsers.UserName AS LastUpdateUserName,
ISNULL(AssignedUsers.UserName, N'none') AS AssignedUsername,
ISNULL(AssignedUsersProfile.DisplayName, N'none') AS AssignedDisplayName,
CreatorUsers.UserName AS CreatorUserName,
ISNULL(CreatorUsersProfile.DisplayName, N'none') AS CreatorDisplayName,
ISNULL(OwnerUsers.UserName, 'none') AS OwnerUserName,
ISNULL(OwnerUsersProfile.DisplayName, N'none') AS OwnerDisplayName,
ISNULL(LastUpdateUsersProfile.DisplayName, 'none') AS LastUpdateDisplayName,
ISNULL(dbo.BugNet_ProjectPriorities.PriorityImageUrl, '') AS PriorityImageUrl,
ISNULL(dbo.BugNet_ProjectIssueTypes.IssueTypeImageUrl, '') AS IssueTypeImageUrl,
ISNULL(dbo.BugNet_ProjectStatus.StatusImageUrl, '') AS StatusImageUrl,
ISNULL(dbo.BugNet_ProjectMilestones.MilestoneImageUrl, '') AS MilestoneImageUrl,
ISNULL(dbo.BugNet_ProjectResolutions.ResolutionImageUrl, '') AS ResolutionImageUrl,
ISNULL(AffectedMilestone.MilestoneImageUrl, '')
AS AffectedMilestoneImageUrl, ISNULL
-- Correlated aggregates: total logged time and vote count per issue.
((SELECT SUM(Duration) AS Expr1
FROM dbo.BugNet_IssueWorkReports AS WR
WHERE (IssueId = dbo.BugNet_Issues.IssueId)), 0.00) AS TimeLogged, ISNULL
((SELECT COUNT(IssueId) AS Expr1
FROM dbo.BugNet_IssueVotes AS V
WHERE (IssueId = dbo.BugNet_Issues.IssueId)), 0) AS IssueVotes,
dbo.BugNet_ProjectCustomFields.CustomFieldName,
dbo.BugNet_ProjectCustomFieldValues.CustomFieldValue,
dbo.BugNet_Issues.IssueProgress,
dbo.BugNet_ProjectMilestones.MilestoneDueDate,
dbo.BugNet_Projects.ProjectDisabled,
CAST(COALESCE (dbo.BugNet_ProjectStatus.IsClosedState, 0) AS BIT) AS IsClosed
FROM
-- RIGHT OUTER JOIN onto BugNet_Issues keeps issues that have no custom field
-- values at all (their CustomFieldName/Value come back NULL).
dbo.BugNet_ProjectCustomFields
INNER JOIN
dbo.BugNet_ProjectCustomFieldValues ON dbo.BugNet_ProjectCustomFields.CustomFieldId = dbo.BugNet_ProjectCustomFieldValues.CustomFieldId
RIGHT OUTER JOIN
dbo.BugNet_Issues ON dbo.BugNet_ProjectCustomFieldValues.IssueId = dbo.BugNet_Issues.IssueId
LEFT OUTER JOIN
dbo.BugNet_ProjectIssueTypes ON dbo.BugNet_Issues.IssueTypeId = dbo.BugNet_ProjectIssueTypes.IssueTypeId
LEFT OUTER JOIN
dbo.BugNet_ProjectPriorities ON dbo.BugNet_Issues.IssuePriorityId = dbo.BugNet_ProjectPriorities.PriorityId
LEFT OUTER JOIN
dbo.BugNet_ProjectCategories ON dbo.BugNet_Issues.IssueCategoryId = dbo.BugNet_ProjectCategories.CategoryId
LEFT OUTER JOIN
dbo.BugNet_ProjectStatus ON dbo.BugNet_Issues.IssueStatusId = dbo.BugNet_ProjectStatus.StatusId
LEFT OUTER JOIN
dbo.BugNet_ProjectMilestones AS AffectedMilestone ON dbo.BugNet_Issues.IssueAffectedMilestoneId = AffectedMilestone.MilestoneId
LEFT OUTER JOIN
dbo.BugNet_ProjectMilestones ON dbo.BugNet_Issues.IssueMilestoneId = dbo.BugNet_ProjectMilestones.MilestoneId
LEFT OUTER JOIN
dbo.BugNet_ProjectResolutions ON dbo.BugNet_Issues.IssueResolutionId = dbo.BugNet_ProjectResolutions.ResolutionId
LEFT OUTER JOIN
dbo.aspnet_Users AS AssignedUsers ON dbo.BugNet_Issues.IssueAssignedUserId = AssignedUsers.UserId
LEFT OUTER JOIN
dbo.aspnet_Users AS LastUpdateUsers ON dbo.BugNet_Issues.LastUpdateUserId = LastUpdateUsers.UserId
LEFT OUTER JOIN
dbo.aspnet_Users AS CreatorUsers ON dbo.BugNet_Issues.IssueCreatorUserId = CreatorUsers.UserId
LEFT OUTER JOIN
dbo.aspnet_Users AS OwnerUsers ON dbo.BugNet_Issues.IssueOwnerUserId = OwnerUsers.UserId
LEFT OUTER JOIN
dbo.BugNet_UserProfiles AS CreatorUsersProfile ON CreatorUsers.UserName = CreatorUsersProfile.UserName
LEFT OUTER JOIN
dbo.BugNet_UserProfiles AS AssignedUsersProfile ON AssignedUsers.UserName = AssignedUsersProfile.UserName
LEFT OUTER JOIN
dbo.BugNet_UserProfiles AS OwnerUsersProfile ON OwnerUsers.UserName = OwnerUsersProfile.UserName
LEFT OUTER JOIN
dbo.BugNet_UserProfiles AS LastUpdateUsersProfile ON LastUpdateUsers.UserName = LastUpdateUsersProfile.UserName
LEFT OUTER JOIN
dbo.BugNet_Projects ON dbo.BugNet_Issues.ProjectId = dbo.BugNet_Projects.ProjectId
GO
-- Denormalized one-row-per-issue view: same lookups and 'none'/'' defaults as
-- BugNet_GetIssuesByProjectIdAndCustomFieldView above, but without the custom
-- field join, so issues are not multiplied per custom field value.
CREATE VIEW [dbo].[BugNet_IssuesView]
AS
SELECT dbo.BugNet_Issues.IssueId, dbo.BugNet_Issues.IssueTitle, dbo.BugNet_Issues.IssueDescription, dbo.BugNet_Issues.IssueStatusId,
dbo.BugNet_Issues.IssuePriorityId, dbo.BugNet_Issues.IssueTypeId, dbo.BugNet_Issues.IssueCategoryId, dbo.BugNet_Issues.ProjectId,
dbo.BugNet_Issues.IssueResolutionId, dbo.BugNet_Issues.IssueCreatorUserId, dbo.BugNet_Issues.IssueAssignedUserId, dbo.BugNet_Issues.IssueOwnerUserId,
dbo.BugNet_Issues.IssueDueDate, dbo.BugNet_Issues.IssueMilestoneId, dbo.BugNet_Issues.IssueAffectedMilestoneId, dbo.BugNet_Issues.IssueVisibility,
dbo.BugNet_Issues.IssueEstimation, dbo.BugNet_Issues.DateCreated, dbo.BugNet_Issues.LastUpdate, dbo.BugNet_Issues.LastUpdateUserId,
dbo.BugNet_Projects.ProjectName, dbo.BugNet_Projects.ProjectCode, ISNULL(dbo.BugNet_ProjectPriorities.PriorityName, N'none') AS PriorityName,
ISNULL(dbo.BugNet_ProjectIssueTypes.IssueTypeName,N'none') AS IssueTypeName,
ISNULL(dbo.BugNet_ProjectCategories.CategoryName, N'none') AS CategoryName, ISNULL(dbo.BugNet_ProjectStatus.StatusName, N'none') AS StatusName,
ISNULL(dbo.BugNet_ProjectMilestones.MilestoneName, N'none') AS MilestoneName, ISNULL(AffectedMilestone.MilestoneName, N'none') AS AffectedMilestoneName,
ISNULL(dbo.BugNet_ProjectResolutions.ResolutionName, 'none') AS ResolutionName, LastUpdateUsers.UserName AS LastUpdateUserName,
ISNULL(AssignedUsers.UserName, N'none') AS AssignedUsername, ISNULL(AssignedUsersProfile.DisplayName, N'none') AS AssignedDisplayName,
CreatorUsers.UserName AS CreatorUserName, ISNULL(CreatorUsersProfile.DisplayName, N'none') AS CreatorDisplayName, ISNULL(OwnerUsers.UserName, 'none')
AS OwnerUserName, ISNULL(OwnerUsersProfile.DisplayName, N'none') AS OwnerDisplayName, ISNULL(LastUpdateUsersProfile.DisplayName, 'none')
AS LastUpdateDisplayName, ISNULL(dbo.BugNet_ProjectPriorities.PriorityImageUrl, '') AS PriorityImageUrl,
ISNULL(dbo.BugNet_ProjectIssueTypes.IssueTypeImageUrl, '') AS IssueTypeImageUrl, ISNULL(dbo.BugNet_ProjectStatus.StatusImageUrl, '') AS StatusImageUrl,
ISNULL(dbo.BugNet_ProjectMilestones.MilestoneImageUrl, '') AS MilestoneImageUrl, ISNULL(dbo.BugNet_ProjectResolutions.ResolutionImageUrl, '')
AS ResolutionImageUrl, ISNULL(AffectedMilestone.MilestoneImageUrl, '') AS AffectedMilestoneImageUrl, ISNULL
-- Correlated aggregates: total logged time and vote count per issue.
((SELECT SUM(Duration) AS Expr1
FROM dbo.BugNet_IssueWorkReports AS WR
WHERE (IssueId = dbo.BugNet_Issues.IssueId)), 0.00) AS TimeLogged, ISNULL
((SELECT COUNT(IssueId) AS Expr1
FROM dbo.BugNet_IssueVotes AS V
WHERE (IssueId = dbo.BugNet_Issues.IssueId)), 0) AS IssueVotes, dbo.BugNet_Issues.Disabled, dbo.BugNet_Issues.IssueProgress,
dbo.BugNet_ProjectMilestones.MilestoneDueDate,
dbo.BugNet_Projects.ProjectDisabled,
CAST(COALESCE (dbo.BugNet_ProjectStatus.IsClosedState, 0) AS BIT) AS IsClosed
FROM dbo.BugNet_Issues LEFT OUTER JOIN
dbo.BugNet_ProjectIssueTypes ON dbo.BugNet_Issues.IssueTypeId = dbo.BugNet_ProjectIssueTypes.IssueTypeId LEFT OUTER JOIN
dbo.BugNet_ProjectPriorities ON dbo.BugNet_Issues.IssuePriorityId = dbo.BugNet_ProjectPriorities.PriorityId LEFT OUTER JOIN
dbo.BugNet_ProjectCategories ON dbo.BugNet_Issues.IssueCategoryId = dbo.BugNet_ProjectCategories.CategoryId LEFT OUTER JOIN
dbo.BugNet_ProjectStatus ON dbo.BugNet_Issues.IssueStatusId = dbo.BugNet_ProjectStatus.StatusId LEFT OUTER JOIN
dbo.BugNet_ProjectMilestones AS AffectedMilestone ON dbo.BugNet_Issues.IssueAffectedMilestoneId = AffectedMilestone.MilestoneId LEFT OUTER JOIN
dbo.BugNet_ProjectMilestones ON dbo.BugNet_Issues.IssueMilestoneId = dbo.BugNet_ProjectMilestones.MilestoneId LEFT OUTER JOIN
dbo.BugNet_ProjectResolutions ON dbo.BugNet_Issues.IssueResolutionId = dbo.BugNet_ProjectResolutions.ResolutionId LEFT OUTER JOIN
dbo.aspnet_Users AS AssignedUsers ON dbo.BugNet_Issues.IssueAssignedUserId = AssignedUsers.UserId LEFT OUTER JOIN
dbo.aspnet_Users AS LastUpdateUsers ON dbo.BugNet_Issues.LastUpdateUserId = LastUpdateUsers.UserId LEFT OUTER JOIN
dbo.aspnet_Users AS CreatorUsers ON dbo.BugNet_Issues.IssueCreatorUserId = CreatorUsers.UserId LEFT OUTER JOIN
dbo.aspnet_Users AS OwnerUsers ON dbo.BugNet_Issues.IssueOwnerUserId = OwnerUsers.UserId LEFT OUTER JOIN
dbo.BugNet_UserProfiles AS CreatorUsersProfile ON CreatorUsers.UserName = CreatorUsersProfile.UserName LEFT OUTER JOIN
dbo.BugNet_UserProfiles AS AssignedUsersProfile ON AssignedUsers.UserName = AssignedUsersProfile.UserName LEFT OUTER JOIN
dbo.BugNet_UserProfiles AS OwnerUsersProfile ON OwnerUsers.UserName = OwnerUsersProfile.UserName LEFT OUTER JOIN
dbo.BugNet_UserProfiles AS LastUpdateUsersProfile ON LastUpdateUsers.UserName = LastUpdateUsersProfile.UserName LEFT OUTER JOIN
dbo.BugNet_Projects ON dbo.BugNet_Issues.ProjectId = dbo.BugNet_Projects.ProjectId
GO
IF EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'[BugNet_Project_GetRoadMap]') AND type in (N'P', N'PC'))
DROP PROCEDURE [dbo].[BugNet_Project_GetRoadMap]
GO
-- Road map: enabled issues of a project, restricted to milestones that still
-- contain at least one issue in a non-closed status. NULL milestone sort orders
-- are pushed to the end via the CASE expression in ORDER BY.
-- NOTE(review): AssignedUserName here vs. the view's alias AssignedUsername --
-- only valid under a case-insensitive collation, which BugNet assumes.
CREATE PROCEDURE [dbo].[BugNet_Project_GetRoadMap]
@ProjectId int
AS
SELECT
PM.SortOrder AS MilestoneSortOrder,
IssueId,
IssueTitle,
IssueDescription,
IssueStatusId,
IssuePriorityId,
IssueTypeId,
IssueCategoryId,
BugNet_IssuesView.ProjectId,
IssueResolutionId,
IssueCreatorUserId,
IssueAssignedUserId,
IssueOwnerUserId,
IssueDueDate,
BugNet_IssuesView.IssueMilestoneId,
IssueVisibility,
BugNet_IssuesView.DateCreated,
IssueEstimation,
LastUpdate,
LastUpdateUserId,
ProjectName,
ProjectCode,
PriorityName,
IssueTypeName,
CategoryName,
StatusName,
ResolutionName,
BugNet_IssuesView.MilestoneName,
BugNet_IssuesView.MilestoneDueDate,
IssueAffectedMilestoneId,
AffectedMilestoneName,
AffectedMilestoneImageUrl,
LastUpdateUserName,
AssignedUserName,
AssignedDisplayName,
CreatorUserName,
CreatorDisplayName,
OwnerUserName,
OwnerDisplayName,
LastUpdateDisplayName,
PriorityImageUrl,
IssueTypeImageUrl,
StatusImageUrl,
BugNet_IssuesView.MilestoneImageUrl,
ResolutionImageUrl,
TimeLogged,
IssueProgress,
[Disabled],
IssueVotes,
IsClosed
FROM
-- INNER JOIN: issues without a milestone are excluded from the road map.
BugNet_IssuesView JOIN BugNet_ProjectMilestones PM on IssueMilestoneId = MilestoneId
WHERE
BugNet_IssuesView.ProjectId = @ProjectId AND BugNet_IssuesView.Disabled = 0
AND
IssueMilestoneId IN (SELECT DISTINCT IssueMilestoneId FROM BugNet_IssuesView WHERE BugNet_IssuesView.Disabled = 0 AND IssueStatusId IN(SELECT StatusId FROM BugNet_ProjectStatus WHERE IsClosedState = 0 AND ProjectId = @ProjectId))
ORDER BY
(CASE WHEN PM.SortOrder IS NULL THEN 1 ELSE 0 END),PM.SortOrder , IssueStatusId ASC, IssueTypeId ASC,IssueCategoryId ASC, AssignedUserName ASC
GO
-- Drop the three mailbox lookup procedures so they can be recreated below with
-- a consistent shape (SELECT *, NOCOUNT ON, profile join for display names).
IF EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'[BugNet_ProjectMailbox_GetMailboxById]') AND type in (N'P', N'PC'))
DROP PROCEDURE [dbo].[BugNet_ProjectMailbox_GetMailboxById]
GO
IF EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'[BugNet_ProjectMailbox_GetMailboxByProjectId]') AND type in (N'P', N'PC'))
DROP PROCEDURE [dbo].[BugNet_ProjectMailbox_GetMailboxByProjectId]
GO
IF EXISTS (SELECT * FROM sys.objects WHERE object_id = OBJECT_ID(N'[BugNet_ProjectMailbox_GetProjectByMailbox]') AND type in (N'P', N'PC'))
DROP PROCEDURE [dbo].[BugNet_ProjectMailbox_GetProjectByMailbox]
GO
-- Fetches one mailbox mapping by primary key, with assignee name/display name
-- and issue type name resolved. INNER JOINs drop rows with a missing user
-- profile or issue type.
CREATE PROCEDURE [dbo].[BugNet_ProjectMailbox_GetMailboxById]
@ProjectMailboxId int
AS
SET NOCOUNT ON
SELECT
BugNet_ProjectMailboxes.*,
u.Username AssignToUserName,
p.DisplayName AssignToDisplayName,
BugNet_ProjectIssueTypes.IssueTypeName
FROM
BugNet_ProjectMailBoxes
INNER JOIN aspnet_Users u ON u.UserId = AssignToUserId
INNER JOIN BugNet_UserProfiles p ON u.UserName = p.UserName
INNER JOIN BugNet_ProjectIssueTypes ON BugNet_ProjectIssueTypes.IssueTypeId = BugNet_ProjectMailboxes.IssueTypeId
WHERE
BugNet_ProjectMailBoxes.ProjectMailboxId = @ProjectMailboxId
GO
-- Replacement for the earlier GetMailboxByProjectId: adds NOCOUNT and uses the
-- short alias pit, but returns the same columns for the same project filter.
CREATE PROCEDURE [dbo].[BugNet_ProjectMailbox_GetMailboxByProjectId]
@ProjectId int
AS
SET NOCOUNT ON
SELECT
BugNet_ProjectMailboxes.*,
u.Username AssignToUserName,
p.DisplayName AssignToDisplayName,
pit.IssueTypeName
FROM
BugNet_ProjectMailBoxes
INNER JOIN aspnet_Users u ON u.UserId = AssignToUserId
INNER JOIN BugNet_UserProfiles p ON u.UserName = p.UserName
INNER JOIN BugNet_ProjectIssueTypes pit ON pit.IssueTypeId = BugNet_ProjectMailboxes.IssueTypeId
WHERE
BugNet_ProjectMailBoxes.ProjectId = @ProjectId
GO
-- Resolves a mailbox address to its project-mailbox configuration, including
-- the assigned user's login/display name and the default issue type name.
-- @mailbox: mailbox address to look up in BugNet_ProjectMailboxes.MailBox.
CREATE PROCEDURE [dbo].[BugNet_ProjectMailbox_GetProjectByMailbox]
@mailbox nvarchar(100)
AS
SET NOCOUNT ON
SELECT
BugNet_ProjectMailboxes.*,
usr.Username AssignToUserName,
prof.DisplayName AssignToDisplayName,
it.IssueTypeName
FROM
BugNet_ProjectMailBoxes
INNER JOIN aspnet_Users usr ON AssignToUserId = usr.UserId
INNER JOIN BugNet_UserProfiles prof ON prof.UserName = usr.UserName
INNER JOIN BugNet_ProjectIssueTypes it ON BugNet_ProjectMailboxes.IssueTypeId = it.IssueTypeId
WHERE
BugNet_ProjectMailBoxes.MailBox = @mailbox
GO
-- End of the T-SQL migration: commit the transaction and re-enable statement
-- execution (NOEXEC was presumably turned on earlier in the file — confirm).
-- Fix: removed stray "| the_stack" extraction residue that followed GO and
-- would have made the final batch invalid.
COMMIT
SET NOEXEC OFF
GO
-- Auto-generated metasfresh application-dictionary migration: renames
-- AD_Element 577727 to 'Internal planned UAT date' and propagates the new
-- name to translations (de_CH/de_DE/en_US/nl_NL) and to every dependent
-- dictionary table (AD_Column, AD_Process_Para, AD_Field, AD_PrintFormatItem,
-- AD_Tab, AD_Window, AD_Menu). Generated statements are kept verbatim;
-- do not hand-edit.
-- 2020-05-18T13:19:37.467Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Element_Trl SET Name='Internal planned UAT date', PrintName='Internal planned UAT date',Updated=TO_TIMESTAMP('2020-05-18 16:19:37','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Element_ID=577727 AND AD_Language='de_CH'
;
-- 2020-05-18T13:19:37.509Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
/* DDL */ select update_TRL_Tables_On_AD_Element_TRL_Update(577727,'de_CH')
;
-- 2020-05-18T13:19:44.983Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Element_Trl SET Name='Internal planned UAT date', PrintName='Internal planned UAT date',Updated=TO_TIMESTAMP('2020-05-18 16:19:44','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Element_ID=577727 AND AD_Language='de_DE'
;
-- 2020-05-18T13:19:44.985Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
/* DDL */ select update_TRL_Tables_On_AD_Element_TRL_Update(577727,'de_DE')
;
-- 2020-05-18T13:19:45.019Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
/* DDL */ select update_ad_element_on_ad_element_trl_update(577727,'de_DE')
;
-- 2020-05-18T13:19:45.021Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Column SET ColumnName='Internal_PlannedUATDate', Name='Internal planned UAT date', Description=NULL, Help=NULL WHERE AD_Element_ID=577727
;
-- 2020-05-18T13:19:45.022Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET ColumnName='Internal_PlannedUATDate', Name='Internal planned UAT date', Description=NULL, Help=NULL, AD_Element_ID=577727 WHERE UPPER(ColumnName)='INTERNAL_PLANNEDUATDATE' AND IsCentrallyMaintained='Y' AND AD_Element_ID IS NULL
;
-- 2020-05-18T13:19:45.032Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET ColumnName='Internal_PlannedUATDate', Name='Internal planned UAT date', Description=NULL, Help=NULL WHERE AD_Element_ID=577727 AND IsCentrallyMaintained='Y'
;
-- 2020-05-18T13:19:45.033Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field SET Name='Internal planned UAT date', Description=NULL, Help=NULL WHERE (AD_Column_ID IN (SELECT AD_Column_ID FROM AD_Column WHERE AD_Element_ID=577727) AND AD_Name_ID IS NULL ) OR (AD_Name_ID = 577727)
;
-- 2020-05-18T13:19:45.094Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_PrintFormatItem pi SET PrintName='Internal planned UAT date', Name='Internal planned UAT date' WHERE IsCentrallyMaintained='Y' AND EXISTS (SELECT * FROM AD_Column c WHERE c.AD_Column_ID=pi.AD_Column_ID AND c.AD_Element_ID=577727)
;
-- 2020-05-18T13:19:45.096Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Tab SET Name='Internal planned UAT date', Description=NULL, Help=NULL, CommitWarning = NULL WHERE AD_Element_ID = 577727
;
-- 2020-05-18T13:19:45.097Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_WINDOW SET Name='Internal planned UAT date', Description=NULL, Help=NULL WHERE AD_Element_ID = 577727
;
-- 2020-05-18T13:19:45.097Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Menu SET Name = 'Internal planned UAT date', Description = NULL, WEBUI_NameBrowse = NULL, WEBUI_NameNew = NULL, WEBUI_NameNewBreadcrumb = NULL WHERE AD_Element_ID = 577727
;
-- 2020-05-18T13:19:49.947Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Element_Trl SET Name='Internal planned UAT date', PrintName='Internal planned UAT date',Updated=TO_TIMESTAMP('2020-05-18 16:19:49','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Element_ID=577727 AND AD_Language='en_US'
;
-- 2020-05-18T13:19:49.948Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
/* DDL */ select update_TRL_Tables_On_AD_Element_TRL_Update(577727,'en_US')
;
-- 2020-05-18T13:19:54.474Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Element_Trl SET Name='Internal planned UAT date', PrintName='Internal planned UAT date',Updated=TO_TIMESTAMP('2020-05-18 16:19:54','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Element_ID=577727 AND AD_Language='nl_NL'
;
-- 2020-05-18T13:19:54.476Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
/* DDL */ select update_TRL_Tables_On_AD_Element_TRL_Update(577727,'nl_NL')
;
-- Auto-generated metasfresh UI-layout migration: reorders the grid columns
-- of a window layout by rewriting IsDisplayedGrid/SeqNoGrid per UI element,
-- then a small correction batch (13:24:19) re-sequences three elements, and
-- one element's widget size is set to 'S'. Generated statements are kept
-- verbatim; do not hand-edit.
-- 2020-05-18T13:24:09.357Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='N', SeqNoGrid=0,Updated=TO_TIMESTAMP('2020-05-18 16:24:09','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=566864
;
-- 2020-05-18T13:24:09.360Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='N', SeqNoGrid=0,Updated=TO_TIMESTAMP('2020-05-18 16:24:09','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=566868
;
-- 2020-05-18T13:24:09.363Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=40,Updated=TO_TIMESTAMP('2020-05-18 16:24:09','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=568709
;
-- 2020-05-18T13:24:09.365Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=50,Updated=TO_TIMESTAMP('2020-05-18 16:24:09','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=568041
;
-- 2020-05-18T13:24:09.368Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=60,Updated=TO_TIMESTAMP('2020-05-18 16:24:09','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=568245
;
-- 2020-05-18T13:24:09.370Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=70,Updated=TO_TIMESTAMP('2020-05-18 16:24:09','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=568243
;
-- 2020-05-18T13:24:09.372Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=80,Updated=TO_TIMESTAMP('2020-05-18 16:24:09','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=568021
;
-- 2020-05-18T13:24:09.375Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=90,Updated=TO_TIMESTAMP('2020-05-18 16:24:09','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=569202
;
-- 2020-05-18T13:24:09.377Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=100,Updated=TO_TIMESTAMP('2020-05-18 16:24:09','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=569203
;
-- 2020-05-18T13:24:09.379Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=110,Updated=TO_TIMESTAMP('2020-05-18 16:24:09','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=568023
;
-- 2020-05-18T13:24:09.381Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=120,Updated=TO_TIMESTAMP('2020-05-18 16:24:09','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=569204
;
-- 2020-05-18T13:24:09.383Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=130,Updated=TO_TIMESTAMP('2020-05-18 16:24:09','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=568024
;
-- 2020-05-18T13:24:09.384Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=140,Updated=TO_TIMESTAMP('2020-05-18 16:24:09','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=569205
;
-- 2020-05-18T13:24:09.386Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=150,Updated=TO_TIMESTAMP('2020-05-18 16:24:09','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=566869
;
-- 2020-05-18T13:24:09.388Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=160,Updated=TO_TIMESTAMP('2020-05-18 16:24:09','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=568027
;
-- 2020-05-18T13:24:09.390Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=170,Updated=TO_TIMESTAMP('2020-05-18 16:24:09','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=566871
;
-- 2020-05-18T13:24:09.391Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=180,Updated=TO_TIMESTAMP('2020-05-18 16:24:09','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=568025
;
-- 2020-05-18T13:24:09.393Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=190,Updated=TO_TIMESTAMP('2020-05-18 16:24:09','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=568022
;
-- 2020-05-18T13:24:09.395Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=200,Updated=TO_TIMESTAMP('2020-05-18 16:24:09','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=568708
;
-- 2020-05-18T13:24:09.397Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=210,Updated=TO_TIMESTAMP('2020-05-18 16:24:09','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=566880
;
-- 2020-05-18T13:24:09.399Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=220,Updated=TO_TIMESTAMP('2020-05-18 16:24:09','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=566873
;
-- 2020-05-18T13:24:19.152Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=50,Updated=TO_TIMESTAMP('2020-05-18 16:24:19','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=568243
;
-- 2020-05-18T13:24:19.157Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=60,Updated=TO_TIMESTAMP('2020-05-18 16:24:19','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=568041
;
-- 2020-05-18T13:24:19.162Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=70,Updated=TO_TIMESTAMP('2020-05-18 16:24:19','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=568245
;
-- 2020-05-18T13:24:59.895Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET WidgetSize='S',Updated=TO_TIMESTAMP('2020-05-18 16:24:59','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=566861
;
-- Auto-generated metasfresh migration: retargets column 570795 to a search
-- reference (AD_Reference_ID=30), renames AD_Element 577724 to
-- 'Internal_S_Milestone_ID', and queues an ALTER of s_issue.Status to
-- VARCHAR(25) with default 'New' via the t_alter_column staging table,
-- backfilling NULL statuses. Generated statements are kept verbatim.
-- 2020-05-18T13:30:37.200Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Column SET AD_Reference_ID=30, AD_Reference_Value_ID=541104,Updated=TO_TIMESTAMP('2020-05-18 16:30:37','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Column_ID=570795
;
-- 2020-05-18T13:31:11.918Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Element SET ColumnName='Internal_S_Milestone_ID', EntityType='de.metas.serviceprovider',Updated=TO_TIMESTAMP('2020-05-18 16:31:11','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Element_ID=577724
;
-- 2020-05-18T13:31:11.920Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Column SET ColumnName='Internal_S_Milestone_ID', Name='Internal milestone', Description=NULL, Help=NULL WHERE AD_Element_ID=577724
;
-- 2020-05-18T13:31:11.921Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET ColumnName='Internal_S_Milestone_ID', Name='Internal milestone', Description=NULL, Help=NULL, AD_Element_ID=577724 WHERE UPPER(ColumnName)='INTERNAL_S_MILESTONE_ID' AND IsCentrallyMaintained='Y' AND AD_Element_ID IS NULL
;
-- 2020-05-18T13:31:11.922Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET ColumnName='Internal_S_Milestone_ID', Name='Internal milestone', Description=NULL, Help=NULL WHERE AD_Element_ID=577724 AND IsCentrallyMaintained='Y'
;
-- 2020-05-18T14:04:57.986Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO t_alter_column values('s_issue','Status','VARCHAR(25)',null,'New')
;
-- 2020-05-18T14:04:58.008Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE S_Issue SET Status='New' WHERE Status IS NULL
;
-- Auto-generated metasfresh migration: adds reference-list entry 542141
-- ('Invoiced') to reference 541142 and seeds untranslated AD_Ref_List_Trl
-- rows for every active system language that lacks one.
-- Fix: removed stray "| the_stack" extraction residue that was fused onto
-- the final statement terminator and broke the script.
-- 2020-05-18T20:15:22.556Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Ref_List (AD_Client_ID,AD_Org_ID,AD_Ref_List_ID,AD_Reference_ID,Created,CreatedBy,EntityType,IsActive,Name,Updated,UpdatedBy,Value,ValueName) VALUES (0,0,542141,541142,TO_TIMESTAMP('2020-05-18 23:15:22','YYYY-MM-DD HH24:MI:SS'),100,'de.metas.serviceprovider','Y','Invoiced',TO_TIMESTAMP('2020-05-18 23:15:22','YYYY-MM-DD HH24:MI:SS'),100,'Invoiced','Invoiced')
;
-- 2020-05-18T20:15:22.560Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Ref_List_Trl (AD_Language,AD_Ref_List_ID, Description,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language, t.AD_Ref_List_ID, t.Description,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Ref_List t WHERE l.IsActive='Y'AND (l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N') AND t.AD_Ref_List_ID=542141 AND NOT EXISTS (SELECT 1 FROM AD_Ref_List_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Ref_List_ID=t.AD_Ref_List_ID)
;
-- Recreate the Umsatzreport (revenue report) objects from scratch: drop both
-- historical function signatures (with and without the attribute-set-instance
-- parameter) and the row-type tables the functions RETURN SETOF.
DROP FUNCTION IF EXISTS report.umsatzreport_report (IN c_period_id numeric, IN issotrx character varying);
DROP FUNCTION IF EXISTS report.umsatzreport_report (IN c_period_id numeric, IN issotrx character varying, IN M_AttributeSetInstance_ID numeric);
DROP TABLE IF EXISTS report.umsatzreport_report;
DROP FUNCTION IF EXISTS report.Umsatzreport_Report_Sub (IN c_period_id numeric, IN issotrx character varying);
DROP FUNCTION IF EXISTS report.Umsatzreport_Report_Sub (IN c_period_id numeric, IN issotrx character varying, IN M_AttributeSetInstance_ID numeric);
DROP TABLE IF EXISTS report.Umsatzreport_Report_Sub;
-- Row type for report.Umsatzreport_Report_Sub(): one row per business
-- partner with period/year revenue sums, their prior-year counterparts,
-- and absolute/percentage differences.
CREATE TABLE report.Umsatzreport_Report_Sub
(
name character varying(60),
periodend date,
lastyearperiodend date,
year character varying(10),
lastyear character varying(10),
sameperiodsum numeric,
sameperiodlastyearsum numeric,
perioddifference numeric,
perioddiffpercentage numeric,
sameyearsum numeric,
lastyearsum numeric,
yeardifference numeric,
yeardiffpercentage numeric,
attributesetinstance character varying(60)
)
-- NOTE(review): WITH (OIDS=FALSE) is a legacy storage parameter; table OIDs
-- were removed in PostgreSQL 12 — confirm target server version.
WITH (
OIDS=FALSE
);
-- Per-business-partner revenue comparison for one accounting period.
-- Parameters:
--   $1 c_period_id              - the base C_Period to report on
--   $2 issotrx                  - 'Y' = sales invoices, 'N' = purchase invoices
--   $3 M_AttributeSetInstance_ID - optional ASI filter; invoice lines must
--      carry exactly the attribute/value pairs of this ASI (no filter when
--      the ASI has no attribute values)
-- Returns one row per partner: period sum, same period last year,
-- year-to-date sum, prior year-to-date, and their differences (percentage
-- differences are NULL when the comparison base is 0).
CREATE FUNCTION report.Umsatzreport_Report_Sub(IN c_period_id numeric, IN issotrx character varying, IN M_AttributeSetInstance_ID numeric ) RETURNS SETOF report.Umsatzreport_Report_Sub AS
$BODY$
SELECT
-- Truncate long partner names to 45 chars with an ellipsis for print layout.
CASE WHEN Length(name) <= 45 THEN name ELSE substring(name FOR 43 ) || '...' END AS name,
PeriodEnd,
LastYearPeriodEnd,
Year,
LastYear,
SamePeriodSum AS SamePeriodSum,
SamePeriodLastYearSum,
SamePeriodSum - SamePeriodLastYearSum AS PeriodDifference,
CASE WHEN SamePeriodSum - SamePeriodLastYearSum != 0 AND SamePeriodLastYearSum != 0
THEN (SamePeriodSum - SamePeriodLastYearSum) / SamePeriodLastYearSum * 100 ELSE NULL
END AS PeriodDiffPercentage,
SameYearSum AS SameYearSum,
LastYearSum AS LastYearSum,
SameYearSum - LastYearSum AS YearDifference,
CASE WHEN SameYearSum - LastYearSum != 0 AND LastYearSum != 0
THEN (SameYearSum - LastYearSum) / LastYearSum * 100 ELSE NULL
END AS YearDiffPercentage,
Attributes as attributesetinstance
FROM
(
SELECT
bp.name,
p.EndDate::Date AS PeriodEnd,
pp.EndDate::Date AS LastYearPeriodEnd,
y.fiscalYear AS Year,
py.fiscalYear AS LastYear,
SUM( CASE WHEN fa.C_Period_ID = p.C_Period_ID THEN AmtAcct ELSE 0 END ) AS SamePeriodSum,
SUM( CASE WHEN fap.C_Year_ID = p.C_Year_ID AND fap.periodNo <= p.PeriodNo THEN AmtAcct ELSE 0 END ) AS SameYearSum,
SUM( CASE WHEN fa.C_Period_ID = pp.C_Period_ID THEN AmtAcct ELSE 0 END ) AS SamePeriodLastYearSum,
SUM( CASE WHEN fap.C_Year_ID = pp.C_Year_ID AND fap.periodNo <= pp.PeriodNo THEN AmtAcct ELSE 0 END ) AS LastYearSum,
att.Attributes
FROM
C_Period p
INNER JOIN C_Year y ON p.C_Year_ID = y.C_Year_ID
-- Get same Period from previous year
-- (step back one full year's worth of active periods).
LEFT OUTER JOIN C_Period pp ON pp.C_Period_ID = report.Get_Predecessor_Period_Recursive ( p.C_Period_ID,
( SELECT count(0) FROM C_Period sp WHERE sp.C_Year_ID = p.C_Year_ID and isActive = 'Y' )::int )
LEFT OUTER JOIN C_Year py ON pp.C_Year_ID = py.C_Year_ID
-- Get data from fact account
-- (signed so that revenue is positive for both sales and purchases).
INNER JOIN (
SELECT
fa.M_Product_ID, fa.C_Period_ID, fa.C_BPartner_ID,
CASE WHEN isSOTrx = 'Y' THEN AmtAcctCr - AmtAcctDr ELSE AmtAcctDr - AmtAcctCr END AS AmtAcct,
il.M_AttributeSetInstance_ID
FROM
Fact_Acct fa
JOIN C_Invoice i ON fa.Record_ID = i.C_Invoice_ID
JOIN C_InvoiceLine il ON fa.Line_ID = il.C_InvoiceLine_ID
WHERE
AD_Table_ID = (SELECT Get_Table_ID('C_Invoice'))
AND IsSOtrx = $2
AND (
-- If the given attribute set instance has values set...
CASE WHEN EXISTS ( SELECT ai_value FROM report.fresh_Attributes WHERE M_AttributeSetInstance_ID = $3 )
-- ... then apply following filter:
THEN (
-- Take lines where the attributes of the current InvoiceLine's asi are in the parameter asi and their Values Match
EXISTS (
SELECT 0
FROM report.fresh_Attributes a -- a = Attributes from invoice line, pa = Parameter Attributes
INNER JOIN report.fresh_Attributes pa ON pa.M_AttributeSetInstance_ID = $3
AND a.at_value = pa.at_value -- same attribute
AND a.ai_value = pa.ai_value -- same value
WHERE a.M_AttributeSetInstance_ID = il.M_AttributeSetInstance_ID
)
-- Dismiss lines where the Attributes in the Parameter are not in the InvoiceLine's asi
AND NOT EXISTS (
SELECT 0
FROM report.fresh_Attributes pa
LEFT OUTER JOIN report.fresh_Attributes a ON a.at_value = pa.at_value AND a.ai_value = pa.ai_value
AND a.M_AttributeSetInstance_ID = il.M_AttributeSetInstance_ID
WHERE pa.M_AttributeSetInstance_ID = $3
AND a.M_AttributeSetInstance_ID IS null
)
)
-- ... else deactivate the filter
ELSE TRUE END
)
) fa ON true
INNER JOIN C_Period fap ON fa.C_Period_ID = fap.C_Period_ID
/* Please note: This is an important implicit filter. Inner Joining the Product
* filters Fact Acct records for e.g. Taxes
*/
INNER JOIN M_Product pr ON fa.M_Product_ID = pr.M_Product_ID
AND pr.M_Product_Category_ID != (SELECT value::numeric FROM AD_SysConfig WHERE name = 'PackingMaterialProductCategoryID')
INNER JOIN C_BPartner bp ON fa.C_BPartner_ID = bp.C_BPartner_ID
-- Human-readable attribute list of the parameter ASI, shown on the report.
LEFT OUTER JOIN (
SELECT String_agg ( ai_value, ', ' ORDER BY Length(ai_value), ai_value ) AS Attributes, M_AttributeSetInstance_ID FROM Report.fresh_Attributes
GROUP BY M_AttributeSetInstance_ID
) att ON $3 = att.M_AttributeSetInstance_ID
WHERE
p.C_Period_ID = $1
GROUP BY
bp.name,
p.EndDate,
pp.EndDate,
y.fiscalYear,
py.fiscalYear,
att.Attributes
) a
ORDER BY
SameYearSum DESC$BODY$
LANGUAGE sql STABLE;
-- Defensive re-drop (the same objects were already dropped earlier in this
-- script) before recreating the aggregating wrapper table and function.
DROP FUNCTION IF EXISTS report.umsatzreport_report (IN c_period_id numeric, IN issotrx character varying, IN M_AttributeSetInstance_ID numeric);
DROP TABLE IF EXISTS report.umsatzreport_report;
-- Row type for report.umsatzreport_report(): the Sub row type plus a
-- unionorder column (1 = detail rows, 2 = totals row) used for sorting.
CREATE TABLE report.umsatzreport_report
(
name character varying(60),
periodend date,
lastyearperiodend date,
year character varying(10),
lastyear character varying(10),
sameperiodsum numeric,
sameperiodlastyearsum numeric,
perioddifference numeric,
perioddiffpercentage numeric,
sameyearsum numeric,
lastyearsum numeric,
yeardifference numeric,
yeardiffpercentage numeric,
attributesetinstance character varying(60),
unionorder integer
)
-- NOTE(review): legacy storage parameter; see note on the Sub table above.
WITH (
OIDS=FALSE
);
-- Report entry point: detail rows from Umsatzreport_Report_Sub (UnionOrder 1)
-- followed by one grand-total row per period/attribute combination
-- (UnionOrder 2, name = NULL). Parameters are passed through to the Sub
-- function unchanged; percentage columns are NULL when the base sum is 0.
CREATE FUNCTION report.umsatzreport_report (IN c_period_id numeric, IN issotrx character varying, IN M_AttributeSetInstance_ID numeric) RETURNS SETOF report.umsatzreport_report AS
$BODY$
SELECT *, 1 AS UnionOrder FROM report.Umsatzreport_Report_Sub ($1, $2, $3)
UNION ALL
SELECT
null as name,
PeriodEnd,
LastYearPeriodEnd,
Year,
LastYear,
SUM( SamePeriodSum ) AS SamePeriodSum,
SUM( SamePeriodLastYearSum ) AS SamePeriodLastYearSum,
SUM( SamePeriodSum ) - SUM( SamePeriodLastYearSum ) AS PeriodDifference,
CASE WHEN SUM( SamePeriodSum ) - SUM( SamePeriodLastYearSum ) != 0 AND SUM( SamePeriodLastYearSum ) != 0
THEN (SUM( SamePeriodSum ) - SUM( SamePeriodLastYearSum ) ) / SUM( SamePeriodLastYearSum ) * 100 ELSE NULL
END AS PeriodDiffPercentage,
SUM( SameYearSum ) AS SameYearSum,
SUM( LastYearSum ) AS LastYearSum,
SUM( SameYearSum ) - SUM( LastYearSum ) AS YearDifference,
CASE WHEN SUM( SameYearSum ) - SUM( LastYearSum ) != 0 AND SUM( LastYearSum ) != 0
THEN (SUM( SameYearSum ) - SUM( LastYearSum ) ) / SUM( LastYearSum ) * 100 ELSE NULL
END AS YearDiffPercentage,
attributesetinstance,
2 AS UnionOrder
FROM
report.Umsatzreport_Report_Sub ($1, $2, $3)
GROUP BY
PeriodEnd,
LastYearPeriodEnd,
Year,
LastYear,
attributesetinstance
ORDER BY
UnionOrder, SameYearSum DESC
$BODY$
LANGUAGE sql STABLE;
-- Recreate the per-business-partner revenue list objects from scratch:
-- drop both functions (full 10-parameter signature) and their row-type tables.
DROP FUNCTION IF EXISTS report.umsatzliste_bpartner_report
(
IN Base_Period_Start date,
IN Base_Period_End date,
IN Comp_Period_Start date,
IN Comp_Period_End date,
IN issotrx character varying,
IN C_BPartner_ID numeric,
IN C_Activity_ID numeric,
IN M_Product_ID numeric,
IN M_Product_Category_ID numeric,
IN M_AttributeSetInstance_ID numeric
);
DROP FUNCTION IF EXISTS report.umsatzliste_bpartner_report_sub
(
IN Base_Period_Start date,
IN Base_Period_End date,
IN Comp_Period_Start date,
IN Comp_Period_End date,
IN issotrx character varying,
IN C_BPartner_ID numeric,
IN C_Activity_ID numeric,
IN M_Product_ID numeric,
IN M_Product_Category_ID numeric,
IN M_AttributeSetInstance_ID numeric
);
DROP TABLE IF EXISTS report.umsatzliste_bpartner_report;
DROP TABLE IF EXISTS report.umsatzliste_bpartner_report_sub;
/* ***************************************************************** */
-- Row type for report.umsatzliste_bpartner_report_sub(): one row per
-- partner/product with base- and comparison-period sums plus the report's
-- parameter echo columns (param_*) used in the print header.
CREATE TABLE report.umsatzliste_bpartner_report_sub
(
bp_name character varying(60),
pc_name character varying(60),
p_name character varying(255),
sameperiodsum numeric,
compperiodsum numeric,
perioddifference numeric,
perioddiffpercentage numeric,
Base_Period_Start character varying(10),
Base_Period_End character varying(10),
Comp_Period_Start character varying(10),
Comp_Period_End character varying(10),
param_IsSOTrx character varying,
param_bp character varying(60),
param_Activity character varying(60),
param_product character varying(255),
param_Product_Category character varying(60),
Param_Attributes character varying(255)
)
-- NOTE(review): legacy storage parameter; OIDs removed in PostgreSQL 12.
WITH (
OIDS=FALSE
);
-- Per-partner/product revenue comparison between two date ranges.
-- Parameters:
--   $1/$2 Base_Period_Start/End - base date range (inclusive)
--   $3/$4 Comp_Period_Start/End - comparison range (inclusive; may be NULL)
--   $5 issotrx                  - 'Y' sales / 'N' purchase
--   $6..$9                      - optional filters (partner, activity,
--                                 product, product category); NULL = all
--   $10 M_AttributeSetInstance_ID - optional exact attribute-set filter
-- Echoes all parameters back as display strings ('alle' = no filter) for
-- the report header. Packing-material products are always excluded.
CREATE FUNCTION report.umsatzliste_bpartner_report_sub
(
IN Base_Period_Start date,
IN Base_Period_End date,
IN Comp_Period_Start date,
IN Comp_Period_End date,
IN issotrx character varying,
IN C_BPartner_ID numeric,
IN C_Activity_ID numeric,
IN M_Product_ID numeric,
IN M_Product_Category_ID numeric,
IN M_AttributeSetInstance_ID numeric
)
RETURNS SETOF report.umsatzliste_bpartner_report_sub AS
$BODY$
SELECT
bp.Name AS bp_name,
pc.Name AS pc_name,
p.Name AS P_name,
SamePeriodSum,
CompPeriodSum,
SamePeriodSum - CompPeriodSum AS PeriodDifference,
CASE WHEN SamePeriodSum - CompPeriodSum != 0 AND CompPeriodSum != 0
THEN (SamePeriodSum - CompPeriodSum) / CompPeriodSum * 100 ELSE NULL
END AS PeriodDiffPercentage,
to_char($1, 'DD.MM.YYYY') AS Base_Period_Start,
to_char($2, 'DD.MM.YYYY') AS Base_Period_End,
COALESCE( to_char($3, 'DD.MM.YYYY'), '') AS Comp_Period_Start,
COALESCE( to_char($4, 'DD.MM.YYYY'), '') AS Comp_Period_End,
CASE WHEN $5 = 'N' THEN 'Einkauf' WHEN $5 = 'Y' THEN 'Verkauf' ELSE 'alle' END AS param_IsSOTrx,
COALESCE ((SELECT name FROM C_BPartner WHERE C_BPartner_ID = $6), 'alle' ) AS param_bp,
COALESCE ((SELECT name FROM C_Activity WHERE C_Activity_ID = $7), 'alle' ) AS param_Activity,
COALESCE ((SELECT name FROM M_Product WHERE M_Product_ID = $8), 'alle' ) AS param_product,
COALESCE ((SELECT name FROM M_Product_Category WHERE M_Product_Category_ID = $9), 'alle' ) AS param_Product_Category,
COALESCE ((SELECT String_Agg(ai_value, ', ' ORDER BY ai_Value) FROM Report.fresh_Attributes WHERE M_AttributeSetInstance_ID = $10), 'alle') AS Param_Attributes
FROM
(
SELECT
fa.C_BPartner_ID,
fa.M_Product_ID,
SUM( CASE WHEN IsInPeriod THEN AmtAcct ELSE 0 END ) AS SamePeriodSum,
SUM( CASE WHEN IsInCompPeriod THEN AmtAcct ELSE 0 END ) AS CompPeriodSum,
1 AS Line_Order
FROM
(
-- Signed amount: revenue positive for sales, cost positive for purchases.
SELECT fa.*,
( fa.DateAcct >= $1 AND fa.DateAcct <= $2 ) AS IsInPeriod,
( fa.DateAcct >= $3 AND fa.DateAcct <= $4 ) AS IsInCompPeriod,
CASE WHEN isSOTrx = 'Y' THEN AmtAcctCr - AmtAcctDr ELSE AmtAcctDr - AmtAcctCr END AS AmtAcct
FROM Fact_Acct fa JOIN C_Invoice i ON fa.Record_ID = i.C_Invoice_ID
WHERE AD_Table_ID = (SELECT Get_Table_ID('C_Invoice'))
) fa
-- NOTE(review): C_Invoice is joined again here (and AD_Table_ID filtered
-- again below) although the inner subquery already did both; redundant but
-- harmless since C_Invoice_ID is the invoice's key — kept as-is.
INNER JOIN C_Invoice i ON fa.Record_ID = i.C_Invoice_ID
INNER JOIN C_InvoiceLine il ON fa.Line_ID = il.C_InvoiceLine_ID
/* Please note: This is an important implicit filter. Inner Joining the Product
* filters Fact Acct records for e.g. Taxes
*/
INNER JOIN M_Product p ON fa.M_Product_ID = p.M_Product_ID
WHERE
AD_Table_ID = ( SELECT Get_Table_ID( 'C_Invoice' ) )
AND ( IsInPeriod OR IsInCompPeriod )
AND i.IsSOtrx = $5
AND ( CASE WHEN $6 IS NULL THEN TRUE ELSE fa.C_BPartner_ID = $6 END )
AND ( CASE WHEN $7 IS NULL THEN TRUE ELSE fa.C_Activity_ID = $7 END )
AND ( CASE WHEN $8 IS NULL THEN TRUE ELSE p.M_Product_ID = $8 END AND p.M_Product_ID IS NOT NULL )
AND ( CASE WHEN $9 IS NULL THEN TRUE ELSE p.M_Product_Category_ID = $9 END
-- It was a requirement to not have HU Packing material within the sums of this report
AND p.M_Product_Category_ID != (SELECT value::numeric FROM AD_SysConfig WHERE name = 'PackingMaterialProductCategoryID')
)
AND (
-- Attribute filter: invoice-line ASI must contain every attribute/value
-- pair of the parameter ASI (and nothing may be missing); no-op when the
-- parameter ASI has no values.
CASE WHEN EXISTS ( SELECT ai_value FROM report.fresh_Attributes WHERE M_AttributeSetInstance_ID = $10 )
THEN (
EXISTS (
SELECT 0
FROM report.fresh_Attributes a
INNER JOIN report.fresh_Attributes pa ON a.at_value = pa.at_value AND a.ai_value = pa.ai_value
AND pa.M_AttributeSetInstance_ID = $10
WHERE a.M_AttributeSetInstance_ID = il.M_AttributeSetInstance_ID
)
AND NOT EXISTS (
SELECT 0
FROM report.fresh_Attributes pa
LEFT OUTER JOIN report.fresh_Attributes a ON a.at_value = pa.at_value AND a.ai_value = pa.ai_value
AND a.M_AttributeSetInstance_ID = il.M_AttributeSetInstance_ID
WHERE pa.M_AttributeSetInstance_ID = $10
AND a.M_AttributeSetInstance_ID IS null
)
)
ELSE TRUE END
)
GROUP BY
fa.C_BPartner_ID,
fa.M_Product_ID
) a
INNER JOIN C_BPartner bp ON a.C_BPartner_ID = bp.C_BPartner_ID
INNER JOIN M_Product p ON a.M_Product_ID = p.M_Product_ID
INNER JOIN M_Product_Category pc ON p.M_Product_Category_ID = pc.M_Product_Category_ID
$BODY$
LANGUAGE sql STABLE;
/* ***************************************************************** */
-- Row type for report.umsatzliste_bpartner_report(): the Sub row type plus
-- unionorder (1 = product detail, 2 = per-category subtotal,
-- 3 = per-partner total) used for sorting.
CREATE TABLE report.umsatzliste_bpartner_report
(
bp_name character varying(60),
pc_name character varying(60),
p_name character varying(255),
sameperiodsum numeric,
compperiodsum numeric,
perioddifference numeric,
perioddiffpercentage numeric,
Base_Period_Start character varying(10),
Base_Period_End character varying(10),
Comp_Period_Start character varying(10),
Comp_Period_End character varying(10),
param_IsSOTrx character varying,
param_bp character varying(60),
param_Activity character varying(60),
param_product character varying(255),
param_Product_Category character varying(60),
Param_Attributes character varying(255),
unionorder integer
)
-- NOTE(review): legacy storage parameter; OIDs removed in PostgreSQL 12.
WITH (
OIDS=FALSE
);
-- Report entry point: per-product detail rows (UnionOrder 1), subtotals per
-- partner/product-category (UnionOrder 2, p_name NULL) and totals per partner
-- (UnionOrder 3, pc_name/p_name NULL), all computed from
-- umsatzliste_bpartner_report_sub with the ten parameters passed through.
-- Percentage columns are NULL when the comparison sum is 0.
-- Fix: removed stray "| the_stack" extraction residue after the final
-- "LANGUAGE sql STABLE;" which made the statement invalid.
CREATE FUNCTION report.umsatzliste_bpartner_report
(
IN Base_Period_Start date,
IN Base_Period_End date,
IN Comp_Period_Start date,
IN Comp_Period_End date,
IN issotrx character varying,
IN C_BPartner_ID numeric,
IN C_Activity_ID numeric,
IN M_Product_ID numeric,
IN M_Product_Category_ID numeric,
IN M_AttributeSetInstance_ID numeric
)
RETURNS SETOF report.umsatzliste_bpartner_report AS
$BODY$
SELECT
*, 1 AS UnionOrder
FROM
report.umsatzliste_bpartner_report_sub ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
UNION ALL
SELECT
bp_name, pc_name, null AS P_name,
SUM( SamePeriodSum ) AS SamePeriodSum,
SUM( CompPeriodSum ) AS CompPeriodSum,
SUM( SamePeriodSum ) - SUM( CompPeriodSum ) AS PeriodDifference,
CASE WHEN SUM( SamePeriodSum ) - SUM( CompPeriodSum ) != 0 AND SUM( CompPeriodSum ) != 0
THEN (SUM( SamePeriodSum ) - SUM( CompPeriodSum )) / SUM( CompPeriodSum ) * 100 ELSE NULL
END AS PeriodDiffPercentage,
Base_Period_Start, Base_Period_End, Comp_Period_Start, Comp_Period_End, param_IsSOTrx,
param_bp, param_Activity, param_product, param_Product_Category, Param_Attributes,
2 AS UnionOrder
FROM
report.umsatzliste_bpartner_report_sub ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
GROUP BY
bp_name, pc_name,
Base_Period_Start, Base_Period_End, Comp_Period_Start, Comp_Period_End, param_IsSOTrx,
param_bp, param_Activity, param_product, param_Product_Category, Param_Attributes
UNION ALL
SELECT
bp_name, null, null,
SUM( SamePeriodSum ) AS SamePeriodSum,
SUM( CompPeriodSum ) AS CompPeriodSum,
SUM( SamePeriodSum ) - SUM( CompPeriodSum ) AS PeriodDifference,
CASE WHEN SUM( SamePeriodSum ) - SUM( CompPeriodSum ) != 0 AND SUM( CompPeriodSum ) != 0
THEN (SUM( SamePeriodSum ) - SUM( CompPeriodSum )) / SUM( CompPeriodSum ) * 100 ELSE NULL
END AS PeriodDiffPercentage,
Base_Period_Start, Base_Period_End, Comp_Period_Start, Comp_Period_End, param_IsSOTrx,
param_bp, param_Activity, param_product, param_Product_Category, Param_Attributes,
3 AS UnionOrder
FROM
report.umsatzliste_bpartner_report_sub ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
GROUP BY
bp_name,
Base_Period_Start, Base_Period_End, Comp_Period_Start, Comp_Period_End, param_IsSOTrx,
param_bp, param_Activity, param_product, param_Product_Category, Param_Attributes
ORDER BY
bp_name, pc_name NULLS LAST, UnionOrder, p_name
$BODY$
LANGUAGE sql STABLE;
-- MakerDAO monthly financial statements (profit & loss + balance sheet).
-- Output: one row per (year, month, report line item) with the DAI amount.
-- On-chain fixed-point scales: rad = 10^45, ray = 10^27, wad = 10^18.
with items as (
-- Static catalogue of report line items. item_rank encodes the section
-- (0-99 PnL, 100-199 assets, 200-299 liabilities & equity) but is not
-- referenced downstream in this query; only label is.
-- NOTE(review): 'Net Income' reuses item_rank 6 ('Workforce Expenses') —
-- looks like a copy/paste slip; harmless here since item_rank is unused.
select 'PnL' as item, 0 as item_rank, '1 - PnL' as label
union all
select 'Lending Revenues' as item, 1 as item_rank, '1.1 - Lending Revenues' as label
union all
select 'Liquidations Revenues' as item, 2 as item_rank, '1.2 - Liquidations Revenues' as label
union all
select 'Trading Revenues' as item, 3 as item_rank, '1.3 - Trading Revenues' as label
union all
select 'Lending Expenses' as item, 4 as item_rank, '1.4 - Lending Expenses' as label
union all
select 'Liquidations Expenses' as item, 5 as item_rank, '1.5 - Liquidations Expenses' as label
union all
select 'Workforce Expenses' as item, 6 as item_rank, '1.6 - Workforce Expenses' as label
union all
select 'Net Income' as item, 6 as item_rank, '1.9 - Net Income' as label
union all
select 'Assets' as item, 100 as item_rank, '2 - Assets' as label
union all
select 'Crypto Loans' as item, 101 as item_rank, '2.1 - Crypto Loans' as label
union all
select 'Trading Assets' as item, 102 as item_rank, '2.2 - Trading Assets' as label
union all
select 'Total Assets' as item, 199 as item_rank, '2.9 - Total Assets' as label
union all
select 'Liabilities & Equity' as item, 200 as item_rank, '3 - Liabilities & Equity' as label
union all
select 'Liabilities (DAI)' as item, 201 as item_rank, '3.1 - Liabilities (DAI)' as label
union all
select 'Equity (Surplus Buffer)' as item, 202 as item_rank, '3.2 - Equity (Surplus Buffer)' as label
union all
select 'Total Liabilities & Equity' as item, 299 as item_rank, '3.9 - Total Liabilities & Equity' as label
),
periods as (
-- One row per calendar month from MCD launch (Nov 2019) through today.
select period::date, extract(year from period) as year, extract(month from period) as month
from generate_series('2019-11-01'::date, current_date, '1 months') period
),
maker_addresses as (
-- Protocol-owned addresses whose VAT flows define the surplus buffer.
select '\xa950524441892a31ebddf91d3ceefa04bf454466'::bytea as address, 'Vow' as name
),
contracts as (
-- Contracts whose VAT moves must NOT be counted as liquidation revenues.
select 'PSM' as contrat_type, '\x89b78cfa322f6c5de0abceecab66aee45393cc5a'::bytea as contract_address -- PSM
union all
select distinct 'FlapFlop' as contrat_type, data from makermcd."VOW_call_file0" -- Vow Flappers and Floppers
),
liquidation_excluded_tx as (
-- Any transaction touched by the PSM or the surplus/debt auction contracts.
select distinct tx_hash
from ethereum."traces"
inner join contracts on "from" = contract_address
where contrat_type in ('PSM', 'FlapFlop')
),
liquidation_revenues as (
-- DAI moved into the Vow, excluding PSM/auction traffic.
select extract(year from call_block_time) as year, extract(month from call_block_time) as month, sum(rad/10^45) as revenues
from makermcd."VAT_call_move"
where dst = '\xa950524441892a31ebddf91d3ceefa04bf454466' -- vow
and call_success
and call_tx_hash not in (select * from liquidation_excluded_tx)
group by 1, 2
),
liquidation_expenses as (
-- Bad debt queued in the Vow via vow.fess (tab is in rad).
select extract(year from call_block_time) as year, extract(month from call_block_time) as month, sum(tab/10^45) as expenses
from makermcd."VOW_call_fess"
where call_success
group by 1, 2
),
liquidation as (
-- Net the two sides per month; only the dominant side is reported.
-- When revenues = expenses both columns come out NULL.
select year, month,
case when revenues > expenses then revenues - expenses end as liquidation_revenues,
case when revenues < expenses then expenses - revenues end as liquidation_expenses
from liquidation_revenues
full outer join liquidation_expenses using (year, month)
),
trading_tx as (
-- Transactions that touched the PSM-USDC-A vault (ilk name is a
-- NUL-padded bytes32, hence the replace/encode dance).
select distinct call_tx_hash
from makermcd."VAT_call_frob"
where replace(encode(i, 'escape'), '\000', '') = 'PSM-USDC-A'
and call_success
),
trading_revenues as (
-- PSM fees: DAI moved to the Vow within PSM transactions.
select extract(year from call_block_time) as year, extract(month from call_block_time) as month, sum(rad)/10^45 as trading_revenues
from makermcd."VAT_call_move"
inner join trading_tx using (call_tx_hash)
where call_success
and dst = '\xa950524441892a31ebddf91d3ceefa04bf454466' -- Vow
group by 1, 2
),
lending_expenses as (
-- DSR interest: DAI sucked from the Vow into the Pot.
select extract(year from call_block_time) as year, extract(month from call_block_time) as month, sum(rad)/10^45 as lending_expenses
from makermcd."VAT_call_suck"
where u = '\xa950524441892a31ebddf91d3ceefa04bf454466' -- Vow
and v = '\x197e90f9fad81970ba7976f33cbd77088e5d7cf7' -- Pot
and call_success
group by 1, 2
),
lending_revenues_1 as (
-- Raw debt events per ilk: frob/grab change the debt (dart),
-- fold accrues the stability-fee rate.
select i as ilk, call_block_time, call_block_number, dart, null as rate
from makermcd."VAT_call_frob"
where call_success
and dart <> 0.0
union all
select i as ilk, call_block_time, call_block_number, dart, 0.0 as rate
from makermcd."VAT_call_grab"
where call_success
and dart <> 0.0
union all
select i as ilk, call_block_time, call_block_number, null as dart, rate
from makermcd."VAT_call_fold"
where call_success
and rate <> 0.0
),
lending_revenues_2 as (
-- Running debt per ilk at each event (block order).
select *, sum(dart) over(partition by ilk order by call_block_number asc) as debt
from lending_revenues_1
),
lending_revenues_3 as (
-- Interest accrued = outstanding debt x rate delta, at each fold.
select replace(encode(ilk, 'escape'), '\000', '') as ilk, extract(year from call_block_time) as year, extract(month from call_block_time) as month, sum(debt * rate)/10^45 as lending_revenues
from lending_revenues_2
where rate is not null
group by 1, 2, 3
),
lending_revenues as (
select year, month,
-- Stablescoins vault with fixed value is counted as trading revenues, not sure what to do with USDC-B
sum(case when ilk not in ('USDC-A', 'USDC-B', 'TUSD-A', 'USDT-A', 'PAXUSD-A', 'GUSD-A') then lending_revenues end) as lending_revenues,
sum(case when ilk in ('USDC-A', 'USDC-B', 'TUSD-A', 'USDT-A', 'PAXUSD-A', 'GUSD-A') then lending_revenues end) as stablecoin_lending_revenues
from lending_revenues_3
group by 1, 2
),
-- List all suck operation from executive actions
operating_expenses_suck_tx as (
select distinct call_tx_hash
from makermcd."VAT_call_suck"
where "u" = '\xa950524441892a31ebddf91d3ceefa04bf454466' -- Vow
and "v" = '\xbe8e3e3618f7474f8cb1d074a26affef007e98fb' -- DS Pause Proxy
and call_success
),
-- When a suck operation is used to directly transfer DAI to a third party
operating_expenses_direct as (
select extract(year from evt_block_time) as year, extract(month from evt_block_time) as month, sum(value)/10^18 as operating_expenses_direct
from erc20."ERC20_evt_Transfer"
inner join operating_expenses_suck_tx on evt_tx_hash = call_tx_hash
where "from" = '\x0000000000000000000000000000000000000000'
and "to" not in (
'\x73f09254a81e1f835ee442d1b3262c1f1d7a13ff', -- Interim multisig
'\xbe8e3e3618f7474f8cb1d074a26affef007e98fb', -- DS Pause Proxy
'\x0000000000000000000000000000000000000000' -- DAI ERC-20 minting
)
and contract_address = '\x6b175474e89094c44da98b954eedeac495271d0f' -- DAI ERC-20
group by 1, 2
),
-- When a DAO owned wallet is making an expense
operating_expenses as (
select extract(year from evt_block_time) as year, extract(month from evt_block_time) as month, sum(value)/10^18 as operating_expenses
from erc20."ERC20_evt_Transfer"
where "from" in (
'\x73f09254a81e1f835ee442d1b3262c1f1d7a13ff', -- Interim multisig
'\xbe8e3e3618f7474f8cb1d074a26affef007e98fb' -- DS Pause Proxy
)
and "to" not in (
'\x73f09254a81e1f835ee442d1b3262c1f1d7a13ff', -- Interim multisig
'\xbe8e3e3618f7474f8cb1d074a26affef007e98fb', -- DS Pause Proxy
'\x0000000000000000000000000000000000000000' -- DAI ERC-20 creation from the Maker internal system
)
and contract_address = '\x6b175474e89094c44da98b954eedeac495271d0f' -- DAI ERC-20
group by 1, 2
),
-- When we got incoming money on a DAO owned wallet.
operating_expenses_reverse as (
select extract(year from evt_block_time) as year, extract(month from evt_block_time) as month, sum(value)/10^18 as operating_expenses_reverse
from erc20."ERC20_evt_Transfer"
where "from" not in (
'\x73f09254a81e1f835ee442d1b3262c1f1d7a13ff', -- Interim multisig
'\xbe8e3e3618f7474f8cb1d074a26affef007e98fb', -- DS Pause Proxy
'\x0000000000000000000000000000000000000000' -- DAI ERC-20 creation from the Maker internal system
)
and "to" in (
'\x73f09254a81e1f835ee442d1b3262c1f1d7a13ff', -- Interim multisig
'\xbe8e3e3618f7474f8cb1d074a26affef007e98fb' -- DS Pause Proxy
)
and contract_address = '\x6b175474e89094c44da98b954eedeac495271d0f' -- DAI ERC-20
group by 1, 2
),
------------------------------------------------------------------------------------------
--- ASSETS
-------------------------------------------------------------------------------------------
assets_1 as (
-- Same debt-event stream as lending_revenues_1, rebuilt for asset valuation.
select i as ilk, call_block_time, call_block_number, dart as dart, null as rate
from makermcd."VAT_call_frob"
where call_success
and dart <> 0.0
union all
select i as ilk, call_block_time, call_block_number, dart as dart, 0.0 as rate
from makermcd."VAT_call_grab"
where call_success
and dart <> 0.0
union all
select i as ilk, call_block_time, call_block_number, null as dart, rate as rate
from makermcd."VAT_call_fold"
where call_success
and rate <> 0.0
),
assets_2 as (
-- Running cumulative rate (ray -> 1.x multiplier) and debt (wad -> DAI) per ilk.
select ilk, call_block_time, call_block_number,
coalesce(1+sum(rate) over(partition by ilk order by call_block_number asc)/10^27,1) as rate,
sum(dart) over(partition by ilk order by call_block_number asc)/10^18 as dart
from assets_1
),
assets_with_rk as (
-- rk = 1 marks the last event of each ilk within each month.
select to_char(call_block_time, 'YYYY-MM') as period, extract(year from call_block_time) as year, extract(month from call_block_time) as month,
replace(encode(ilk, 'escape'), '\000', '') as collateral,
dart*rate as asset_value,
row_number() over (partition by ilk, to_char(call_block_time, 'YYYY-MM') order by call_block_time desc) as rk
from assets_2
),
assets_group_by as (
-- Month-end outstanding debt per collateral.
select *
from assets_with_rk
where rk = 1
and asset_value <> 0.0
),
assets as (
-- Split month-end assets into crypto loans vs stablecoin/PSM holdings.
select year, month, sum(asset_value) as asset_value,
sum(case when collateral like 'PSM%' or collateral in ('USDC-A','USDC-B','SAI', 'TUSD-A','GUSD-A','PAXUSD-A')
then asset_value end) as other_assets,
sum(case when collateral not like 'PSM%' and collateral not in ('USDC-A','USDC-B','SAI', 'TUSD-A','GUSD-A','PAXUSD-A')
then asset_value end) as crypto_loans
from assets_group_by
group by 1, 2
),
-- Surplus-buffer components: DAI/sin flows in and out of the Vow.
sb_dai_in as (
select extract(year from call_block_time) as year, extract(month from call_block_time) as month, sum(rad/10^45) as dai_inflow
from makermcd."VAT_call_move"
where dst in (select address from maker_addresses)
and call_success
group by 1, 2
),
sb_dai_out as (
select extract(year from call_block_time) as year, extract(month from call_block_time) as month, sum(rad/10^45) as dai_outflow
from makermcd."VAT_call_move"
where src in (select address from maker_addresses)
and call_success
group by 1, 2
),
sb_sin_out as (
select extract(year from call_block_time) as year, extract(month from call_block_time) as month, sum(rad/10^45) as sin_outflow
from makermcd."VAT_call_suck"
where u in (select address from maker_addresses)
and call_success
group by 1, 2
),
sb_sin_in as (
select extract(year from call_block_time) as year, extract(month from call_block_time) as month, sum(rad/10^45) as sin_inflow
from makermcd."VAT_call_suck"
where v in (select address from maker_addresses)
and call_success
group by 1, 2
),
sb_fess as (
select extract(year from call_block_time) as year, extract(month from call_block_time) as month, sum(tab/10^45) as fess
from makermcd."VOW_call_fess"
where call_success
group by 1, 2
),
sb_accrued_interest_1 as (
select i as ilk, call_block_time, call_block_number, dart, null as rate
from makermcd."VAT_call_frob"
where call_success
and dart <> 0.0
union all
select i as ilk, call_block_time, call_block_number, dart, 0.0 as rate
from makermcd."VAT_call_grab"
where call_success
and dart <> 0.0
union all
select i as ilk, call_block_time-interval'5 second' /* to be sure frob is called first */, call_block_number, null as dart, rate
from makermcd."VAT_call_fold"
where call_success
and rate <> 0.0
),
sb_accrued_interest_2 as (
-- Ordered by the (interval-adjusted) time, unlike lending_revenues_2,
-- so that a fold in the same block ranks after the frob it follows.
select *, sum(dart) over(partition by ilk order by call_block_time asc) as debt
from sb_accrued_interest_1
),
sb_accrued_interest_3 as (
select replace(encode(ilk, 'escape'), '\000', '') as ilk, extract(year from call_block_time) as year,
extract(month from call_block_time) as month, sum(debt * rate)/10^45 as lending_revenues
from sb_accrued_interest_2
where rate is not null
group by 1, 2, 3
),
sb_accrued_interest as (
select year, month, sum(lending_revenues) as accrued_interests
from sb_accrued_interest_3
group by 1, 2
),
sb_fusion as (
-- All surplus-buffer components joined onto the full month spine.
select year, month, sum(dai_inflow) as dai_inflow, sum(dai_outflow) as dai_outflow, sum(sin_outflow) as sin_outflow, sum(sin_inflow) as sin_inflow,
sum(fess) as fess, sum(accrued_interests) as accrued_interests
from periods
left join sb_dai_in using (year, month)
left join sb_dai_out using (year, month)
left join sb_sin_out using (year, month)
left join sb_sin_in using (year, month)
left join sb_fess using (year, month)
left join sb_accrued_interest using (year, month)
group by 1, 2
),
sb as (
-- Running surplus buffer = cumulative net of all components over time.
select year, month,
sum(coalesce(dai_inflow, 0)-coalesce(dai_outflow, 0)-coalesce(sin_outflow, 0)+coalesce(sin_inflow, 0)
-coalesce(fess, 0)+coalesce(accrued_interests, 0)) over (order by year, month) as surplus_buffer
from sb_fusion
)
-- Final pivot: one (year, month, label) row; the CASE maps each report
-- line to its source measure. (A duplicated dead 'Total Assets' branch
-- was removed — in a CASE only the first matching WHEN fires.)
select extract(year from period) as year, extract(month from period) as month, label as item,
sum(case item
when 'Liquidations Revenues' then liquidation_revenues
when 'Liquidations Expenses' then -liquidation_expenses
when 'Trading Revenues' then coalesce(trading_revenues,0) + coalesce(stablecoin_lending_revenues,0)
when 'Lending Revenues' then lending_revenues
when 'Lending Expenses' then -lending_expenses
when 'Workforce Expenses' then coalesce(-operating_expenses,0)+coalesce(-operating_expenses_direct,0)+coalesce(operating_expenses_reverse,0)
when 'Net Income' then
coalesce(liquidation_revenues,0) + coalesce(-liquidation_expenses,0)
+ coalesce(trading_revenues,0) + coalesce(stablecoin_lending_revenues,0)
+ coalesce(lending_revenues,0) + coalesce(-lending_expenses,0)
+ coalesce(-operating_expenses,0)+coalesce(-operating_expenses_direct,0)+coalesce(operating_expenses_reverse,0)
when 'Crypto Loans' then crypto_loans
when 'Trading Assets' then other_assets
when 'Total Assets' then asset_value
when 'Liabilities (DAI)' then coalesce(asset_value,0)-coalesce(surplus_buffer,0)
when 'Equity (Surplus Buffer)' then coalesce(surplus_buffer,0)
when 'Total Liabilities & Equity' then asset_value
end
) as value
from periods
cross join items
left outer join liquidation using (year, month)
left outer join trading_revenues using (year, month)
left outer join lending_expenses using (year, month)
left outer join lending_revenues using (year, month)
left outer join operating_expenses_direct using (year, month)
left outer join operating_expenses using (year, month)
left outer join operating_expenses_reverse using (year, month)
left outer join assets using (year, month)
left outer join sb using (year, month)
group by 1, 2, 3
-- dbWarden uninstall, step 1: remove the database-level DDL audit trigger,
-- then every dbWarden stored procedure, then every dbWarden table.
-- Order matters: the trigger goes first (it logs DDL, including these drops),
-- and table-level triggers are dropped before their tables.
IF EXISTS (SELECT * FROM sys.triggers WHERE parent_class_desc = 'DATABASE' AND name = N'tr_DDL_SchemaChangeLog')
BEGIN
DROP TRIGGER [tr_DDL_SchemaChangeLog] ON DATABASE
END
GO
-- Drop the monitoring / statistics-gathering procedures.
IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_NAME = 'usp_JobStats' AND ROUTINE_SCHEMA = 'dbo' AND ROUTINE_TYPE = 'PROCEDURE')
BEGIN
DROP PROC dbo.usp_JobStats
END
GO
IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_NAME = 'usp_PerfStats' AND ROUTINE_SCHEMA = 'dbo' AND ROUTINE_TYPE = 'PROCEDURE')
BEGIN
DROP PROC dbo.usp_PerfStats
END
GO
IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_NAME = 'usp_MemoryUsageStats' AND ROUTINE_SCHEMA = 'dbo' AND ROUTINE_TYPE = 'PROCEDURE')
BEGIN
DROP PROC dbo.usp_MemoryUsageStats
END
GO
IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_NAME = 'usp_CPUStats' AND ROUTINE_SCHEMA = 'dbo' AND ROUTINE_TYPE = 'PROCEDURE')
BEGIN
DROP PROC dbo.usp_CPUStats
END
GO
IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_NAME = 'usp_CPUProcessAlert' AND ROUTINE_SCHEMA = 'dbo' AND ROUTINE_TYPE = 'PROCEDURE')
BEGIN
DROP PROC dbo.usp_CPUProcessAlert
END
GO
IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_NAME = 'usp_LongRunningJobs' AND ROUTINE_SCHEMA = 'dbo' AND ROUTINE_TYPE = 'PROCEDURE')
BEGIN
DROP PROC dbo.usp_LongRunningJobs
END
GO
IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_NAME = 'usp_LongRunningQueries' AND ROUTINE_SCHEMA = 'dbo' AND ROUTINE_TYPE = 'PROCEDURE')
BEGIN
DROP PROC dbo.usp_LongRunningQueries
END
GO
IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_NAME = 'usp_CheckBlocking' AND ROUTINE_SCHEMA = 'dbo' AND ROUTINE_TYPE = 'PROCEDURE')
BEGIN
DROP PROC dbo.usp_CheckBlocking
END
GO
IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_NAME = 'usp_FileStats' AND ROUTINE_SCHEMA = 'dbo' AND ROUTINE_TYPE = 'PROCEDURE')
BEGIN
DROP PROC dbo.usp_FileStats
END
GO
IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_NAME = 'usp_CheckFilesWork' AND ROUTINE_SCHEMA = 'dbo' AND ROUTINE_TYPE = 'PROCEDURE')
BEGIN
DROP PROC dbo.usp_CheckFilesWork
END
GO
IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_NAME = 'usp_CheckFiles' AND ROUTINE_SCHEMA = 'dbo' AND ROUTINE_TYPE = 'PROCEDURE')
BEGIN
DROP PROC dbo.usp_CheckFiles
END
GO
-- Drop the data-dictionary maintenance procedures.
IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_NAME = 'dd_PopulateDataDictionary' AND ROUTINE_SCHEMA = 'dbo' AND ROUTINE_TYPE = 'PROCEDURE')
BEGIN
DROP PROC dbo.dd_PopulateDataDictionary
END
GO
IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_NAME = 'dd_UpdateDataDictionaryTable' AND ROUTINE_SCHEMA = 'dbo' AND ROUTINE_TYPE = 'PROCEDURE')
BEGIN
DROP PROC dbo.dd_UpdateDataDictionaryTable
END
GO
IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_NAME = 'dd_UpdateDataDictionaryField' AND ROUTINE_SCHEMA = 'dbo' AND ROUTINE_TYPE = 'PROCEDURE')
BEGIN
DROP PROC dbo.dd_UpdateDataDictionaryField
END
GO
IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_NAME = 'dd_TestDataDictionaryTables' AND ROUTINE_SCHEMA = 'dbo' AND ROUTINE_TYPE = 'PROCEDURE')
BEGIN
DROP PROC dbo.dd_TestDataDictionaryTables
END
GO
IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_NAME = 'dd_TestDataDictionaryFields' AND ROUTINE_SCHEMA = 'dbo' AND ROUTINE_TYPE = 'PROCEDURE')
BEGIN
DROP PROC dbo.dd_TestDataDictionaryFields
END
GO
IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_NAME = 'dd_ApplyDataDictionary' AND ROUTINE_SCHEMA = 'dbo' AND ROUTINE_TYPE = 'PROCEDURE')
BEGIN
DROP PROC dbo.dd_ApplyDataDictionary
END
GO
IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_NAME = 'dd_ScavengeDataDictionaryTables' AND ROUTINE_SCHEMA = 'dbo' AND ROUTINE_TYPE = 'PROCEDURE')
BEGIN
DROP PROC dbo.dd_ScavengeDataDictionaryTables
END
GO
IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_NAME = 'dd_ScavengeDataDictionaryFields' AND ROUTINE_SCHEMA = 'dbo' AND ROUTINE_TYPE = 'PROCEDURE')
BEGIN
DROP PROC dbo.dd_ScavengeDataDictionaryFields
END
GO
IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_NAME = 'sp_ViewTableExtendedProperties' AND ROUTINE_SCHEMA = 'dbo' AND ROUTINE_TYPE = 'PROCEDURE')
BEGIN
DROP PROC dbo.sp_ViewTableExtendedProperties
END
GO
IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_NAME = 'usp_TodaysDeadlocks' AND ROUTINE_SCHEMA = 'dbo' AND ROUTINE_TYPE = 'PROCEDURE')
BEGIN
DROP PROC dbo.usp_TodaysDeadlocks
END
GO
-- Drop the reporting procedures.
IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_NAME = 'rpt_Queries' AND ROUTINE_SCHEMA = 'dbo' AND ROUTINE_TYPE = 'PROCEDURE')
BEGIN
DROP PROC dbo.rpt_Queries
END
GO
IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_NAME = 'rpt_Blocking' AND ROUTINE_SCHEMA = 'dbo' AND ROUTINE_TYPE = 'PROCEDURE')
BEGIN
DROP PROC dbo.rpt_Blocking
END
GO
IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_NAME = 'rpt_JobHistory' AND ROUTINE_SCHEMA = 'dbo' AND ROUTINE_TYPE = 'PROCEDURE')
BEGIN
DROP PROC dbo.rpt_JobHistory
END
GO
IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_NAME = 'rpt_HealthReport' AND ROUTINE_SCHEMA = 'dbo' AND ROUTINE_TYPE = 'PROCEDURE')
BEGIN
DROP PROC dbo.rpt_HealthReport
END
GO
IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_NAME = 'gen_GetHealthReportHTML' AND ROUTINE_SCHEMA = 'dbo' AND ROUTINE_TYPE = 'PROCEDURE')
BEGIN
DROP PROC dbo.gen_GetHealthReportHTML
END
GO
IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_NAME = 'gen_GetHealthReportToEmail' AND ROUTINE_SCHEMA = 'dbo' AND ROUTINE_TYPE = 'PROCEDURE')
BEGIN
DROP PROC dbo.gen_GetHealthReportToEmail
END
GO
IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.ROUTINES WHERE ROUTINE_NAME = 'gen_GetHealthReportToFile' AND ROUTINE_SCHEMA = 'dbo' AND ROUTINE_TYPE = 'PROCEDURE')
BEGIN
DROP PROC dbo.gen_GetHealthReportToFile
END
GO
-- Drop the dbWarden tables (history/log tables first, settings tables last).
IF EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'DataDictionary_Fields' AND TABLE_SCHEMA = 'dbo')
BEGIN
DROP TABLE dbo.DataDictionary_Fields
END
GO
IF EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'DataDictionary_Tables' AND TABLE_SCHEMA = 'dbo')
BEGIN
DROP TABLE dbo.DataDictionary_Tables
END
GO
IF EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'CPUStatsHistory' AND TABLE_SCHEMA = 'dbo')
BEGIN
DROP TABLE dbo.CPUStatsHistory
END
GO
-- The insert trigger on BlockingHistory must go before the table itself
-- (no GO between them: both run in the same batch, trigger first).
IF EXISTS (SELECT * FROM sys.triggers WHERE object_id = OBJECT_ID(N'[dbo].[ti_blockinghistory]'))
DROP TRIGGER [dbo].[ti_blockinghistory]
IF EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'BlockingHistory' AND TABLE_SCHEMA = 'dbo')
BEGIN
DROP TABLE dbo.BlockingHistory
END
GO
IF EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'HealthReport' AND TABLE_SCHEMA = 'dbo')
BEGIN
DROP TABLE dbo.HealthReport
END
GO
IF EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'FileStatsHistory' AND TABLE_SCHEMA = 'dbo')
BEGIN
DROP TABLE dbo.FileStatsHistory
END
GO
IF EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'JobStatsHistory' AND TABLE_SCHEMA = 'dbo')
BEGIN
DROP TABLE dbo.JobStatsHistory
END
GO
IF EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'MemoryUsageHistory' AND TABLE_SCHEMA = 'dbo')
BEGIN
DROP TABLE dbo.MemoryUsageHistory
END
GO
IF EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'PerfStatsHistory' AND TABLE_SCHEMA = 'dbo')
BEGIN
DROP TABLE dbo.PerfStatsHistory
END
GO
IF EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'QueryHistory' AND TABLE_SCHEMA = 'dbo')
BEGIN
DROP TABLE dbo.QueryHistory
END
GO
IF EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'SchemaChangeLog' AND TABLE_SCHEMA = 'dbo')
BEGIN
DROP TABLE dbo.SchemaChangeLog
END
GO
IF EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'AlertSettings' AND TABLE_SCHEMA = 'dbo')
BEGIN
DROP TABLE dbo.AlertSettings
END
GO
IF EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'DatabaseSettings' AND TABLE_SCHEMA = 'dbo')
BEGIN
DROP TABLE dbo.DatabaseSettings
END
GO
-- dbWarden uninstall, step 2: in every user database (database_id > 4 skips
-- master/tempdb/model/msdb), drop the tr_DDL_SchemaChangeLog trigger and the
-- dbo.SchemaChangeLog table via dynamic SQL. Uses a #databases work table
-- with a status flag instead of a cursor.
-- Fixes vs. original: removed unused @USER/@droploginsql declarations,
-- removed a stray unary '+' in the concatenation, and used QUOTENAME so a
-- database name containing ']' cannot break out of the bracketing.
DECLARE @sqlexec VARCHAR(max),
@dbname VARCHAR(200)
SELECT name, 0 as status
INTO #databases
FROM sys.databases
WHERE database_id > 4
WHILE EXISTS (SELECT name FROM #databases WHERE status = 0)
BEGIN
SET @dbname = (SELECT top 1 name FROM #databases WHERE status = 0)
-- Build the per-database batch; EXEC runs it in its own scope, so the
-- USE only affects the dynamic batch.
SELECT @sqlexec = 'USE ' + QUOTENAME(name) + CHAR(13) + CHAR(10)
+ 'IF EXISTS (SELECT * FROM sys.triggers WHERE [name] = ''tr_DDL_SchemaChangeLog'') DROP TRIGGER tr_DDL_SchemaChangeLog ON DATABASE'
+ CHAR(13) + 'IF EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = ''SchemaChangeLog'' AND TABLE_SCHEMA = ''dbo'') DROP TABLE dbo.SchemaChangeLog'
FROM #databases
WHERE name = @dbname
-- Mark the database as processed before executing, so a failure in EXEC
-- cannot loop forever on the same database.
UPDATE #databases
SET status = 1
WHERE name = @dbname
EXEC (@sqlexec)
END
print 'done'
DROP TABLE #databases
USE [msdb]
GO
-- dbWarden uninstall, step 3: delete the SQL Agent jobs and operators,
-- then drop the dbWarden database itself.
-- (The banner comments below originally all said [dbWarden_BlockingAlert];
-- they have been corrected to name the job each batch actually deletes.)
/****** Object: Job [dbWarden_BlockingAlert] Script Date: 04/15/2013 09:22:26 ******/
IF EXISTS (SELECT job_id FROM msdb.dbo.sysjobs_view WHERE name = N'dbWarden_BlockingAlert')
EXEC msdb.dbo.sp_delete_job @job_name=N'dbWarden_BlockingAlert', @delete_unused_schedule=1
GO
/****** Object: Job [dbWarden_CheckFiles] Script Date: 04/15/2013 09:22:26 ******/
IF EXISTS (SELECT job_id FROM msdb.dbo.sysjobs_view WHERE name = N'dbWarden_CheckFiles')
EXEC msdb.dbo.sp_delete_job @job_name=N'dbWarden_CheckFiles', @delete_unused_schedule=1
GO
/****** Object: Job [dbWarden_CPUAlert] Script Date: 04/15/2013 09:22:26 ******/
IF EXISTS (SELECT job_id FROM msdb.dbo.sysjobs_view WHERE name = N'dbWarden_CPUAlert')
EXEC msdb.dbo.sp_delete_job @job_name=N'dbWarden_CPUAlert', @delete_unused_schedule=1
GO
/****** Object: Job [dbWarden_HealthReport] Script Date: 04/15/2013 09:22:26 ******/
IF EXISTS (SELECT job_id FROM msdb.dbo.sysjobs_view WHERE name = N'dbWarden_HealthReport')
EXEC msdb.dbo.sp_delete_job @job_name=N'dbWarden_HealthReport', @delete_unused_schedule=1
GO
/****** Object: Job [dbWarden_LongRunningJobsAlert] Script Date: 04/15/2013 09:22:26 ******/
IF EXISTS (SELECT job_id FROM msdb.dbo.sysjobs_view WHERE name = N'dbWarden_LongRunningJobsAlert')
EXEC msdb.dbo.sp_delete_job @job_name=N'dbWarden_LongRunningJobsAlert', @delete_unused_schedule=1
GO
/****** Object: Job [dbWarden_LongRunningQueriesAlert] Script Date: 04/15/2013 09:22:26 ******/
IF EXISTS (SELECT job_id FROM msdb.dbo.sysjobs_view WHERE name = N'dbWarden_LongRunningQueriesAlert')
EXEC msdb.dbo.sp_delete_job @job_name=N'dbWarden_LongRunningQueriesAlert', @delete_unused_schedule=1
GO
/****** Object: Job [dbWarden_MemoryUsageStats] Script Date: 04/15/2013 09:22:26 ******/
IF EXISTS (SELECT job_id FROM msdb.dbo.sysjobs_view WHERE name = N'dbWarden_MemoryUsageStats')
EXEC msdb.dbo.sp_delete_job @job_name=N'dbWarden_MemoryUsageStats', @delete_unused_schedule=1
GO
/****** Object: Job [dbWarden_PerfStats] Script Date: 04/15/2013 09:22:26 ******/
IF EXISTS (SELECT job_id FROM msdb.dbo.sysjobs_view WHERE name = N'dbWarden_PerfStats')
EXEC msdb.dbo.sp_delete_job @job_name=N'dbWarden_PerfStats', @delete_unused_schedule=1
GO
-- Remove the dbWarden alert operators.
IF EXISTS (SELECT name FROM msdb.dbo.sysoperators WHERE name = N'SQL_DBA')
EXEC msdb.dbo.sp_delete_operator @name=N'SQL_DBA'
GO
IF EXISTS (SELECT name FROM msdb.dbo.sysoperators WHERE name = N'SQL_DBA_vtext')
EXEC msdb.dbo.sp_delete_operator @name=N'SQL_DBA_vtext'
GO
-- Finally drop the dbWarden repository database itself.
-- NOTE(review): no IF EXISTS guard here, unlike every other drop — this
-- batch fails if the database is already gone; presumably intentional
-- as the last step, but worth confirming.
DROP DATABASE dbWarden;
GO
--
-- Test for ALTER some_object {RENAME TO, OWNER TO, SET SCHEMA}
--
-- Clean up in case a prior regression run failed
SET client_min_messages TO 'warning';
DROP ROLE IF EXISTS regtest_alter_user1;
DROP ROLE IF EXISTS regtest_alter_user2;
DROP ROLE IF EXISTS regtest_alter_user3;
RESET client_min_messages;
CREATE USER regtest_alter_user3;
CREATE USER regtest_alter_user2;
CREATE USER regtest_alter_user1 IN ROLE regtest_alter_user3;
CREATE SCHEMA alt_nsp1;
CREATE SCHEMA alt_nsp2;
GRANT ALL ON SCHEMA alt_nsp1, alt_nsp2 TO public;
SET search_path = alt_nsp1, public;
--
-- Function and Aggregate
--
SET SESSION AUTHORIZATION regtest_alter_user1;
CREATE FUNCTION alt_func1(int) RETURNS int LANGUAGE sql
AS 'SELECT $1 + 1';
CREATE FUNCTION alt_func2(int) RETURNS int LANGUAGE sql
AS 'SELECT $1 - 1';
CREATE AGGREGATE alt_agg1 (
sfunc1 = int4pl, basetype = int4, stype1 = int4, initcond = 0
);
CREATE AGGREGATE alt_agg2 (
sfunc1 = int4mi, basetype = int4, stype1 = int4, initcond = 0
);
ALTER AGGREGATE alt_func1(int) RENAME TO alt_func3; -- failed (not aggregate)
ALTER AGGREGATE alt_func1(int) OWNER TO regtest_alter_user3; -- failed (not aggregate)
ALTER AGGREGATE alt_func1(int) SET SCHEMA alt_nsp2; -- failed (not aggregate)
ALTER FUNCTION alt_func1(int) RENAME TO alt_func2; -- failed (name conflict)
ALTER FUNCTION alt_func1(int) RENAME TO alt_func3; -- OK
ALTER FUNCTION alt_func2(int) OWNER TO regtest_alter_user2; -- failed (no role membership)
ALTER FUNCTION alt_func2(int) OWNER TO regtest_alter_user3; -- OK
ALTER FUNCTION alt_func2(int) SET SCHEMA alt_nsp1; -- OK, already there
ALTER FUNCTION alt_func2(int) SET SCHEMA alt_nsp2; -- OK
ALTER AGGREGATE alt_agg1(int) RENAME TO alt_agg2; -- failed (name conflict)
ALTER AGGREGATE alt_agg1(int) RENAME TO alt_agg3; -- OK
ALTER AGGREGATE alt_agg2(int) OWNER TO regtest_alter_user2; -- failed (no role membership)
ALTER AGGREGATE alt_agg2(int) OWNER TO regtest_alter_user3; -- OK
ALTER AGGREGATE alt_agg2(int) SET SCHEMA alt_nsp2; -- OK
SET SESSION AUTHORIZATION regtest_alter_user2;
CREATE FUNCTION alt_func1(int) RETURNS int LANGUAGE sql
AS 'SELECT $1 + 2';
CREATE FUNCTION alt_func2(int) RETURNS int LANGUAGE sql
AS 'SELECT $1 - 2';
CREATE AGGREGATE alt_agg1 (
sfunc1 = int4pl, basetype = int4, stype1 = int4, initcond = 100
);
CREATE AGGREGATE alt_agg2 (
sfunc1 = int4mi, basetype = int4, stype1 = int4, initcond = -100
);
ALTER FUNCTION alt_func3(int) RENAME TO alt_func4; -- failed (not owner)
ALTER FUNCTION alt_func1(int) RENAME TO alt_func4; -- OK
ALTER FUNCTION alt_func3(int) OWNER TO regtest_alter_user2; -- failed (not owner)
ALTER FUNCTION alt_func2(int) OWNER TO regtest_alter_user3; -- failed (no role membership)
ALTER FUNCTION alt_func3(int) SET SCHEMA alt_nsp2; -- failed (not owner)
ALTER FUNCTION alt_func2(int) SET SCHEMA alt_nsp2; -- failed (name conflicts)
ALTER AGGREGATE alt_agg3(int) RENAME TO alt_agg4; -- failed (not owner)
ALTER AGGREGATE alt_agg1(int) RENAME TO alt_agg4; -- OK
ALTER AGGREGATE alt_agg3(int) OWNER TO regtest_alter_user2; -- failed (not owner)
ALTER AGGREGATE alt_agg2(int) OWNER TO regtest_alter_user3; -- failed (no role membership)
ALTER AGGREGATE alt_agg3(int) SET SCHEMA alt_nsp2; -- failed (not owner)
ALTER AGGREGATE alt_agg2(int) SET SCHEMA alt_nsp2; -- failed (name conflict)
RESET SESSION AUTHORIZATION;
SELECT n.nspname, proname, prorettype::regtype, proisagg, a.rolname
FROM pg_proc p, pg_namespace n, pg_authid a
WHERE p.pronamespace = n.oid AND p.proowner = a.oid
AND n.nspname IN ('alt_nsp1', 'alt_nsp2')
ORDER BY nspname, proname;
--
-- We would test collations here, but it's not possible because the error
-- messages tend to be nonportable.
--
--
-- Conversion
--
SET SESSION AUTHORIZATION regtest_alter_user1;
CREATE CONVERSION alt_conv1 FOR 'LATIN1' TO 'UTF8' FROM iso8859_1_to_utf8;
CREATE CONVERSION alt_conv2 FOR 'LATIN1' TO 'UTF8' FROM iso8859_1_to_utf8;
ALTER CONVERSION alt_conv1 RENAME TO alt_conv2; -- failed (name conflict)
ALTER CONVERSION alt_conv1 RENAME TO alt_conv3; -- OK
ALTER CONVERSION alt_conv2 OWNER TO regtest_alter_user2; -- failed (no role membership)
ALTER CONVERSION alt_conv2 OWNER TO regtest_alter_user3; -- OK
ALTER CONVERSION alt_conv2 SET SCHEMA alt_nsp2; -- OK
SET SESSION AUTHORIZATION regtest_alter_user2;
CREATE CONVERSION alt_conv1 FOR 'LATIN1' TO 'UTF8' FROM iso8859_1_to_utf8;
CREATE CONVERSION alt_conv2 FOR 'LATIN1' TO 'UTF8' FROM iso8859_1_to_utf8;
ALTER CONVERSION alt_conv3 RENAME TO alt_conv4; -- failed (not owner)
ALTER CONVERSION alt_conv1 RENAME TO alt_conv4; -- OK
ALTER CONVERSION alt_conv3 OWNER TO regtest_alter_user2; -- failed (not owner)
ALTER CONVERSION alt_conv2 OWNER TO regtest_alter_user3; -- failed (no role membership)
ALTER CONVERSION alt_conv3 SET SCHEMA alt_nsp2; -- failed (not owner)
ALTER CONVERSION alt_conv2 SET SCHEMA alt_nsp2; -- failed (name conflict)
RESET SESSION AUTHORIZATION;
SELECT n.nspname, c.conname, a.rolname
FROM pg_conversion c, pg_namespace n, pg_authid a
WHERE c.connamespace = n.oid AND c.conowner = a.oid
AND n.nspname IN ('alt_nsp1', 'alt_nsp2')
ORDER BY nspname, conname;
--
-- Foreign Data Wrapper and Foreign Server
--
CREATE FOREIGN DATA WRAPPER alt_fdw1;
CREATE FOREIGN DATA WRAPPER alt_fdw2;
CREATE SERVER alt_fserv1 FOREIGN DATA WRAPPER alt_fdw1;
CREATE SERVER alt_fserv2 FOREIGN DATA WRAPPER alt_fdw2;
ALTER FOREIGN DATA WRAPPER alt_fdw1 RENAME TO alt_fdw2; -- failed (name conflict)
ALTER FOREIGN DATA WRAPPER alt_fdw1 RENAME TO alt_fdw3; -- OK
ALTER SERVER alt_fserv1 RENAME TO alt_fserv2; -- failed (name conflict)
ALTER SERVER alt_fserv1 RENAME TO alt_fserv3; -- OK
SELECT fdwname FROM pg_foreign_data_wrapper WHERE fdwname like 'alt_fdw%';
SELECT srvname FROM pg_foreign_server WHERE srvname like 'alt_fserv%';
--
-- Procedural Language
--
CREATE LANGUAGE alt_lang1 HANDLER plpgsql_call_handler;
CREATE LANGUAGE alt_lang2 HANDLER plpgsql_call_handler;
ALTER LANGUAGE alt_lang1 OWNER TO regtest_alter_user1; -- OK
ALTER LANGUAGE alt_lang2 OWNER TO regtest_alter_user2; -- OK
SET SESSION AUTHORIZATION regtest_alter_user1;
ALTER LANGUAGE alt_lang1 RENAME TO alt_lang2; -- failed (name conflict)
ALTER LANGUAGE alt_lang2 RENAME TO alt_lang3; -- failed (not owner)
ALTER LANGUAGE alt_lang1 RENAME TO alt_lang3; -- OK
ALTER LANGUAGE alt_lang2 OWNER TO regtest_alter_user3; -- failed (not owner)
ALTER LANGUAGE alt_lang3 OWNER TO regtest_alter_user2; -- failed (no role membership)
ALTER LANGUAGE alt_lang3 OWNER TO regtest_alter_user3; -- OK
RESET SESSION AUTHORIZATION;
SELECT lanname, a.rolname
FROM pg_language l, pg_authid a
WHERE l.lanowner = a.oid AND l.lanname like 'alt_lang%'
ORDER BY lanname;
--
-- Operator
--
SET SESSION AUTHORIZATION regtest_alter_user1;
CREATE OPERATOR @-@ ( leftarg = int4, rightarg = int4, procedure = int4mi );
CREATE OPERATOR @+@ ( leftarg = int4, rightarg = int4, procedure = int4pl );
ALTER OPERATOR @+@(int4, int4) OWNER TO regtest_alter_user2; -- failed (no role membership)
ALTER OPERATOR @+@(int4, int4) OWNER TO regtest_alter_user3; -- OK
ALTER OPERATOR @-@(int4, int4) SET SCHEMA alt_nsp2; -- OK
SET SESSION AUTHORIZATION regtest_alter_user2;
CREATE OPERATOR @-@ ( leftarg = int4, rightarg = int4, procedure = int4mi );
ALTER OPERATOR @+@(int4, int4) OWNER TO regtest_alter_user2; -- failed (not owner)
ALTER OPERATOR @-@(int4, int4) OWNER TO regtest_alter_user3; -- failed (no role membership)
ALTER OPERATOR @+@(int4, int4) SET SCHEMA alt_nsp2; -- failed (not owner)
-- can't test this: the error message includes the raw oid of namespace
-- ALTER OPERATOR @-@(int4, int4) SET SCHEMA alt_nsp2; -- failed (name conflict)
RESET SESSION AUTHORIZATION;
SELECT n.nspname, oprname, a.rolname,
oprleft::regtype, oprright::regtype, oprcode::regproc
FROM pg_operator o, pg_namespace n, pg_authid a
WHERE o.oprnamespace = n.oid AND o.oprowner = a.oid
AND n.nspname IN ('alt_nsp1', 'alt_nsp2')
ORDER BY nspname, oprname;
--
-- OpFamily and OpClass
--
-- Create operator families/classes as superuser, hand them to user1, then
-- exercise RENAME / OWNER TO / SET SCHEMA as that user.
CREATE OPERATOR FAMILY alt_opf1 USING hash;
CREATE OPERATOR FAMILY alt_opf2 USING hash;
ALTER OPERATOR FAMILY alt_opf1 USING hash OWNER TO regtest_alter_user1;
ALTER OPERATOR FAMILY alt_opf2 USING hash OWNER TO regtest_alter_user1;
CREATE OPERATOR CLASS alt_opc1 FOR TYPE uuid USING hash AS STORAGE uuid;
CREATE OPERATOR CLASS alt_opc2 FOR TYPE uuid USING hash AS STORAGE uuid;
ALTER OPERATOR CLASS alt_opc1 USING hash OWNER TO regtest_alter_user1;
ALTER OPERATOR CLASS alt_opc2 USING hash OWNER TO regtest_alter_user1;
SET SESSION AUTHORIZATION regtest_alter_user1;
ALTER OPERATOR FAMILY alt_opf1 USING hash RENAME TO alt_opf2; -- failed (name conflict)
ALTER OPERATOR FAMILY alt_opf1 USING hash RENAME TO alt_opf3; -- OK
ALTER OPERATOR FAMILY alt_opf2 USING hash OWNER TO regtest_alter_user2; -- failed (no role membership)
ALTER OPERATOR FAMILY alt_opf2 USING hash OWNER TO regtest_alter_user3; -- OK
ALTER OPERATOR FAMILY alt_opf2 USING hash SET SCHEMA alt_nsp2; -- OK
ALTER OPERATOR CLASS alt_opc1 USING hash RENAME TO alt_opc2; -- failed (name conflict)
ALTER OPERATOR CLASS alt_opc1 USING hash RENAME TO alt_opc3; -- OK
ALTER OPERATOR CLASS alt_opc2 USING hash OWNER TO regtest_alter_user2; -- failed (no role membership)
ALTER OPERATOR CLASS alt_opc2 USING hash OWNER TO regtest_alter_user3; -- OK
ALTER OPERATOR CLASS alt_opc2 USING hash SET SCHEMA alt_nsp2; -- OK
RESET SESSION AUTHORIZATION;
-- Second round: fresh families/classes owned by user2, exercised as user2
-- (not-owner and name-conflict failure paths).
CREATE OPERATOR FAMILY alt_opf1 USING hash;
CREATE OPERATOR FAMILY alt_opf2 USING hash;
ALTER OPERATOR FAMILY alt_opf1 USING hash OWNER TO regtest_alter_user2;
ALTER OPERATOR FAMILY alt_opf2 USING hash OWNER TO regtest_alter_user2;
CREATE OPERATOR CLASS alt_opc1 FOR TYPE macaddr USING hash AS STORAGE macaddr;
CREATE OPERATOR CLASS alt_opc2 FOR TYPE macaddr USING hash AS STORAGE macaddr;
ALTER OPERATOR CLASS alt_opc1 USING hash OWNER TO regtest_alter_user2;
ALTER OPERATOR CLASS alt_opc2 USING hash OWNER TO regtest_alter_user2;
SET SESSION AUTHORIZATION regtest_alter_user2;
ALTER OPERATOR FAMILY alt_opf3 USING hash RENAME TO alt_opf4; -- failed (not owner)
ALTER OPERATOR FAMILY alt_opf1 USING hash RENAME TO alt_opf4; -- OK
ALTER OPERATOR FAMILY alt_opf3 USING hash OWNER TO regtest_alter_user2; -- failed (not owner)
ALTER OPERATOR FAMILY alt_opf2 USING hash OWNER TO regtest_alter_user3; -- failed (no role membership)
ALTER OPERATOR FAMILY alt_opf3 USING hash SET SCHEMA alt_nsp2; -- failed (not owner)
ALTER OPERATOR FAMILY alt_opf2 USING hash SET SCHEMA alt_nsp2; -- failed (name conflict)
ALTER OPERATOR CLASS alt_opc3 USING hash RENAME TO alt_opc4; -- failed (not owner)
ALTER OPERATOR CLASS alt_opc1 USING hash RENAME TO alt_opc4; -- OK
ALTER OPERATOR CLASS alt_opc3 USING hash OWNER TO regtest_alter_user2; -- failed (not owner)
ALTER OPERATOR CLASS alt_opc2 USING hash OWNER TO regtest_alter_user3; -- failed (no role membership)
ALTER OPERATOR CLASS alt_opc3 USING hash SET SCHEMA alt_nsp2; -- failed (not owner)
ALTER OPERATOR CLASS alt_opc2 USING hash SET SCHEMA alt_nsp2; -- failed (name conflict)
RESET SESSION AUTHORIZATION;
-- Show families (excluding the ones auto-created alongside the opclasses)
-- and classes remaining in the test schemas.
SELECT nspname, opfname, amname, rolname
FROM pg_opfamily o, pg_am m, pg_namespace n, pg_authid a
WHERE o.opfmethod = m.oid AND o.opfnamespace = n.oid AND o.opfowner = a.oid
AND n.nspname IN ('alt_nsp1', 'alt_nsp2')
AND NOT opfname LIKE 'alt_opc%'
ORDER BY nspname, opfname;
SELECT nspname, opcname, amname, rolname
FROM pg_opclass o, pg_am m, pg_namespace n, pg_authid a
WHERE o.opcmethod = m.oid AND o.opcnamespace = n.oid AND o.opcowner = a.oid
AND n.nspname IN ('alt_nsp1', 'alt_nsp2')
ORDER BY nspname, opcname;
-- ALTER OPERATOR FAMILY ... ADD/DROP
-- Should work. Textbook case of CREATE / ALTER ADD / ALTER DROP / DROP
BEGIN TRANSACTION;
CREATE OPERATOR FAMILY alt_opf4 USING btree;
ALTER OPERATOR FAMILY alt_opf4 USING btree ADD
-- int4 vs int2
OPERATOR 1 < (int4, int2) ,
OPERATOR 2 <= (int4, int2) ,
OPERATOR 3 = (int4, int2) ,
OPERATOR 4 >= (int4, int2) ,
OPERATOR 5 > (int4, int2) ,
FUNCTION 1 btint42cmp(int4, int2);
ALTER OPERATOR FAMILY alt_opf4 USING btree DROP
-- int4 vs int2
OPERATOR 1 (int4, int2) ,
OPERATOR 2 (int4, int2) ,
OPERATOR 3 (int4, int2) ,
OPERATOR 4 (int4, int2) ,
OPERATOR 5 (int4, int2) ,
FUNCTION 1 (int4, int2) ;
DROP OPERATOR FAMILY alt_opf4 USING btree;
ROLLBACK;
-- Should fail. Invalid values for ALTER OPERATOR FAMILY .. ADD / DROP
CREATE OPERATOR FAMILY alt_opf4 USING btree;
ALTER OPERATOR FAMILY alt_opf4 USING invalid_index_method ADD OPERATOR 1 < (int4, int2); -- invalid indexing_method
ALTER OPERATOR FAMILY alt_opf4 USING btree ADD OPERATOR 6 < (int4, int2); -- operator number should be between 1 and 5
ALTER OPERATOR FAMILY alt_opf4 USING btree ADD OPERATOR 0 < (int4, int2); -- operator number should be between 1 and 5
ALTER OPERATOR FAMILY alt_opf4 USING btree ADD OPERATOR 1 < ; -- operator without argument types
ALTER OPERATOR FAMILY alt_opf4 USING btree ADD FUNCTION 0 btint42cmp(int4, int2); -- function number should be between 1 and 5
ALTER OPERATOR FAMILY alt_opf4 USING btree ADD FUNCTION 6 btint42cmp(int4, int2); -- function number should be between 1 and 5
ALTER OPERATOR FAMILY alt_opf4 USING btree ADD STORAGE invalid_storage; -- Ensure STORAGE is not a part of ALTER OPERATOR FAMILY
DROP OPERATOR FAMILY alt_opf4 USING btree;
-- Should fail. Need to be SUPERUSER to do ALTER OPERATOR FAMILY .. ADD / DROP
BEGIN TRANSACTION;
CREATE ROLE regtest_alter_user5 NOSUPERUSER;
CREATE OPERATOR FAMILY alt_opf5 USING btree;
SET ROLE regtest_alter_user5;
ALTER OPERATOR FAMILY alt_opf5 USING btree ADD OPERATOR 1 < (int4, int2), FUNCTION 1 btint42cmp(int4, int2);
RESET ROLE;
DROP OPERATOR FAMILY alt_opf5 USING btree;
ROLLBACK;
-- Should fail. Need rights to namespace for ALTER OPERATOR FAMILY .. ADD / DROP
BEGIN TRANSACTION;
CREATE ROLE regtest_alter_user6;
CREATE SCHEMA alt_nsp6;
REVOKE ALL ON SCHEMA alt_nsp6 FROM regtest_alter_user6;
CREATE OPERATOR FAMILY alt_nsp6.alt_opf6 USING btree;
SET ROLE regtest_alter_user6;
ALTER OPERATOR FAMILY alt_nsp6.alt_opf6 USING btree ADD OPERATOR 1 < (int4, int2);
ROLLBACK;
-- Should fail. Only two arguments required for ALTER OPERATOR FAMILY ... DROP OPERATOR
CREATE OPERATOR FAMILY alt_opf7 USING btree;
ALTER OPERATOR FAMILY alt_opf7 USING btree ADD OPERATOR 1 < (int4, int2);
ALTER OPERATOR FAMILY alt_opf7 USING btree DROP OPERATOR 1 (int4, int2, int8);
DROP OPERATOR FAMILY alt_opf7 USING btree;
-- Should work. During ALTER OPERATOR FAMILY ... DROP OPERATOR
-- when left type is the same as right type, a DROP with only one argument type should work
CREATE OPERATOR FAMILY alt_opf8 USING btree;
ALTER OPERATOR FAMILY alt_opf8 USING btree ADD OPERATOR 1 < (int4, int4);
DROP OPERATOR FAMILY alt_opf8 USING btree;
-- Should work. Textbook case of ALTER OPERATOR FAMILY ... ADD OPERATOR with FOR ORDER BY
-- (FOR ORDER BY is only legal for ordering operators, i.e. gist here.)
CREATE OPERATOR FAMILY alt_opf9 USING gist;
ALTER OPERATOR FAMILY alt_opf9 USING gist ADD OPERATOR 1 < (int4, int4) FOR ORDER BY float_ops;
DROP OPERATOR FAMILY alt_opf9 USING gist;
-- Should fail. Ensure correct ordering methods in ALTER OPERATOR FAMILY ... ADD OPERATOR .. FOR ORDER BY
CREATE OPERATOR FAMILY alt_opf10 USING btree;
ALTER OPERATOR FAMILY alt_opf10 USING btree ADD OPERATOR 1 < (int4, int4) FOR ORDER BY float_ops;
DROP OPERATOR FAMILY alt_opf10 USING btree;
-- Should work. Textbook case of ALTER OPERATOR FAMILY ... ADD OPERATOR with FOR ORDER BY
CREATE OPERATOR FAMILY alt_opf11 USING gist;
ALTER OPERATOR FAMILY alt_opf11 USING gist ADD OPERATOR 1 < (int4, int4) FOR ORDER BY float_ops;
ALTER OPERATOR FAMILY alt_opf11 USING gist DROP OPERATOR 1 (int4, int4);
DROP OPERATOR FAMILY alt_opf11 USING gist;
-- Should fail. btree comparison functions should return INTEGER in ALTER OPERATOR FAMILY ... ADD FUNCTION
-- Each of the next four cases builds a throwaway family + support function
-- inside a transaction and rolls back, so nothing leaks between cases.
BEGIN TRANSACTION;
CREATE OPERATOR FAMILY alt_opf12 USING btree;
CREATE FUNCTION fn_opf12 (int4, int2) RETURNS BIGINT AS 'SELECT NULL::BIGINT;' LANGUAGE SQL;
ALTER OPERATOR FAMILY alt_opf12 USING btree ADD FUNCTION 1 fn_opf12(int4, int2);
DROP OPERATOR FAMILY alt_opf12 USING btree;
ROLLBACK;
-- Should fail. hash comparison functions should return INTEGER in ALTER OPERATOR FAMILY ... ADD FUNCTION
BEGIN TRANSACTION;
CREATE OPERATOR FAMILY alt_opf13 USING hash;
CREATE FUNCTION fn_opf13 (int4) RETURNS BIGINT AS 'SELECT NULL::BIGINT;' LANGUAGE SQL;
ALTER OPERATOR FAMILY alt_opf13 USING hash ADD FUNCTION 1 fn_opf13(int4);
DROP OPERATOR FAMILY alt_opf13 USING hash;
ROLLBACK;
-- Should fail. btree comparison functions should have two arguments in ALTER OPERATOR FAMILY ... ADD FUNCTION
BEGIN TRANSACTION;
CREATE OPERATOR FAMILY alt_opf14 USING btree;
CREATE FUNCTION fn_opf14 (int4) RETURNS BIGINT AS 'SELECT NULL::BIGINT;' LANGUAGE SQL;
ALTER OPERATOR FAMILY alt_opf14 USING btree ADD FUNCTION 1 fn_opf14(int4);
DROP OPERATOR FAMILY alt_opf14 USING btree;
ROLLBACK;
-- Should fail. hash comparison functions should have one argument in ALTER OPERATOR FAMILY ... ADD FUNCTION
BEGIN TRANSACTION;
CREATE OPERATOR FAMILY alt_opf15 USING hash;
CREATE FUNCTION fn_opf15 (int4, int2) RETURNS BIGINT AS 'SELECT NULL::BIGINT;' LANGUAGE SQL;
ALTER OPERATOR FAMILY alt_opf15 USING hash ADD FUNCTION 1 fn_opf15(int4, int2);
DROP OPERATOR FAMILY alt_opf15 USING hash;
ROLLBACK;
-- Should fail. In gist throw an error when giving different data types for function argument
-- without defining left / right type in ALTER OPERATOR FAMILY ... ADD FUNCTION
CREATE OPERATOR FAMILY alt_opf16 USING gist;
ALTER OPERATOR FAMILY alt_opf16 USING gist ADD FUNCTION 1 btint42cmp(int4, int2);
DROP OPERATOR FAMILY alt_opf16 USING gist;
-- Should fail. duplicate operator number / function number in ALTER OPERATOR FAMILY ... ADD FUNCTION
CREATE OPERATOR FAMILY alt_opf17 USING btree;
ALTER OPERATOR FAMILY alt_opf17 USING btree ADD OPERATOR 1 < (int4, int4), OPERATOR 1 < (int4, int4); -- operator # appears twice in same statement
ALTER OPERATOR FAMILY alt_opf17 USING btree ADD OPERATOR 1 < (int4, int4); -- operator 1 requested first-time
ALTER OPERATOR FAMILY alt_opf17 USING btree ADD OPERATOR 1 < (int4, int4); -- operator 1 requested again in separate statement
ALTER OPERATOR FAMILY alt_opf17 USING btree ADD
OPERATOR 1 < (int4, int2) ,
OPERATOR 2 <= (int4, int2) ,
OPERATOR 3 = (int4, int2) ,
OPERATOR 4 >= (int4, int2) ,
OPERATOR 5 > (int4, int2) ,
FUNCTION 1 btint42cmp(int4, int2) ,
FUNCTION 1 btint42cmp(int4, int2); -- procedure 1 appears twice in same statement
ALTER OPERATOR FAMILY alt_opf17 USING btree ADD
OPERATOR 1 < (int4, int2) ,
OPERATOR 2 <= (int4, int2) ,
OPERATOR 3 = (int4, int2) ,
OPERATOR 4 >= (int4, int2) ,
OPERATOR 5 > (int4, int2) ,
FUNCTION 1 btint42cmp(int4, int2); -- procedure 1 appears first time
ALTER OPERATOR FAMILY alt_opf17 USING btree ADD
OPERATOR 1 < (int4, int2) ,
OPERATOR 2 <= (int4, int2) ,
OPERATOR 3 = (int4, int2) ,
OPERATOR 4 >= (int4, int2) ,
OPERATOR 5 > (int4, int2) ,
FUNCTION 1 btint42cmp(int4, int2); -- procedure 1 requested again in separate statement
DROP OPERATOR FAMILY alt_opf17 USING btree;
-- Should fail. Ensure that DROP requests for missing OPERATOR / FUNCTIONS
-- return appropriate message in ALTER OPERATOR FAMILY ... DROP OPERATOR / FUNCTION
CREATE OPERATOR FAMILY alt_opf18 USING btree;
ALTER OPERATOR FAMILY alt_opf18 USING btree DROP OPERATOR 1 (int4, int4);
ALTER OPERATOR FAMILY alt_opf18 USING btree ADD
OPERATOR 1 < (int4, int2) ,
OPERATOR 2 <= (int4, int2) ,
OPERATOR 3 = (int4, int2) ,
OPERATOR 4 >= (int4, int2) ,
OPERATOR 5 > (int4, int2) ,
FUNCTION 1 btint42cmp(int4, int2);
ALTER OPERATOR FAMILY alt_opf18 USING btree DROP FUNCTION 2 (int4, int4);
DROP OPERATOR FAMILY alt_opf18 USING btree;
--
-- Text Search Dictionary
--
-- Same rename / owner / schema matrix as above, for text search objects.
SET SESSION AUTHORIZATION regtest_alter_user1;
CREATE TEXT SEARCH DICTIONARY alt_ts_dict1 (template=simple);
CREATE TEXT SEARCH DICTIONARY alt_ts_dict2 (template=simple);
ALTER TEXT SEARCH DICTIONARY alt_ts_dict1 RENAME TO alt_ts_dict2; -- failed (name conflict)
ALTER TEXT SEARCH DICTIONARY alt_ts_dict1 RENAME TO alt_ts_dict3; -- OK
ALTER TEXT SEARCH DICTIONARY alt_ts_dict2 OWNER TO regtest_alter_user2; -- failed (no role membership)
ALTER TEXT SEARCH DICTIONARY alt_ts_dict2 OWNER TO regtest_alter_user3; -- OK
ALTER TEXT SEARCH DICTIONARY alt_ts_dict2 SET SCHEMA alt_nsp2; -- OK
SET SESSION AUTHORIZATION regtest_alter_user2;
CREATE TEXT SEARCH DICTIONARY alt_ts_dict1 (template=simple);
CREATE TEXT SEARCH DICTIONARY alt_ts_dict2 (template=simple);
ALTER TEXT SEARCH DICTIONARY alt_ts_dict3 RENAME TO alt_ts_dict4; -- failed (not owner)
ALTER TEXT SEARCH DICTIONARY alt_ts_dict1 RENAME TO alt_ts_dict4; -- OK
ALTER TEXT SEARCH DICTIONARY alt_ts_dict3 OWNER TO regtest_alter_user2; -- failed (not owner)
ALTER TEXT SEARCH DICTIONARY alt_ts_dict2 OWNER TO regtest_alter_user3; -- failed (no role membership)
ALTER TEXT SEARCH DICTIONARY alt_ts_dict3 SET SCHEMA alt_nsp2; -- failed (not owner)
ALTER TEXT SEARCH DICTIONARY alt_ts_dict2 SET SCHEMA alt_nsp2; -- failed (name conflict)
RESET SESSION AUTHORIZATION;
SELECT nspname, dictname, rolname
FROM pg_ts_dict t, pg_namespace n, pg_authid a
WHERE t.dictnamespace = n.oid AND t.dictowner = a.oid
AND n.nspname in ('alt_nsp1', 'alt_nsp2')
ORDER BY nspname, dictname;
--
-- Text Search Configuration
--
SET SESSION AUTHORIZATION regtest_alter_user1;
CREATE TEXT SEARCH CONFIGURATION alt_ts_conf1 (copy=english);
CREATE TEXT SEARCH CONFIGURATION alt_ts_conf2 (copy=english);
ALTER TEXT SEARCH CONFIGURATION alt_ts_conf1 RENAME TO alt_ts_conf2; -- failed (name conflict)
ALTER TEXT SEARCH CONFIGURATION alt_ts_conf1 RENAME TO alt_ts_conf3; -- OK
ALTER TEXT SEARCH CONFIGURATION alt_ts_conf2 OWNER TO regtest_alter_user2; -- failed (no role membership)
ALTER TEXT SEARCH CONFIGURATION alt_ts_conf2 OWNER TO regtest_alter_user3; -- OK
ALTER TEXT SEARCH CONFIGURATION alt_ts_conf2 SET SCHEMA alt_nsp2; -- OK
SET SESSION AUTHORIZATION regtest_alter_user2;
CREATE TEXT SEARCH CONFIGURATION alt_ts_conf1 (copy=english);
CREATE TEXT SEARCH CONFIGURATION alt_ts_conf2 (copy=english);
ALTER TEXT SEARCH CONFIGURATION alt_ts_conf3 RENAME TO alt_ts_conf4; -- failed (not owner)
ALTER TEXT SEARCH CONFIGURATION alt_ts_conf1 RENAME TO alt_ts_conf4; -- OK
ALTER TEXT SEARCH CONFIGURATION alt_ts_conf3 OWNER TO regtest_alter_user2; -- failed (not owner)
ALTER TEXT SEARCH CONFIGURATION alt_ts_conf2 OWNER TO regtest_alter_user3; -- failed (no role membership)
ALTER TEXT SEARCH CONFIGURATION alt_ts_conf3 SET SCHEMA alt_nsp2; -- failed (not owner)
ALTER TEXT SEARCH CONFIGURATION alt_ts_conf2 SET SCHEMA alt_nsp2; -- failed (name conflict)
RESET SESSION AUTHORIZATION;
SELECT nspname, cfgname, rolname
FROM pg_ts_config t, pg_namespace n, pg_authid a
WHERE t.cfgnamespace = n.oid AND t.cfgowner = a.oid
AND n.nspname in ('alt_nsp1', 'alt_nsp2')
ORDER BY nspname, cfgname;
--
-- Text Search Template
--
-- Templates and parsers have no owner column, so only rename / schema
-- moves (and name conflicts) are tested here.
CREATE TEXT SEARCH TEMPLATE alt_ts_temp1 (lexize=dsimple_lexize);
CREATE TEXT SEARCH TEMPLATE alt_ts_temp2 (lexize=dsimple_lexize);
ALTER TEXT SEARCH TEMPLATE alt_ts_temp1 RENAME TO alt_ts_temp2; -- failed (name conflict)
ALTER TEXT SEARCH TEMPLATE alt_ts_temp1 RENAME TO alt_ts_temp3; -- OK
ALTER TEXT SEARCH TEMPLATE alt_ts_temp2 SET SCHEMA alt_nsp2; -- OK
CREATE TEXT SEARCH TEMPLATE alt_ts_temp2 (lexize=dsimple_lexize);
ALTER TEXT SEARCH TEMPLATE alt_ts_temp2 SET SCHEMA alt_nsp2; -- failed (name conflict)
SELECT nspname, tmplname
FROM pg_ts_template t, pg_namespace n
WHERE t.tmplnamespace = n.oid AND nspname like 'alt_nsp%'
ORDER BY nspname, tmplname;
--
-- Text Search Parser
--
CREATE TEXT SEARCH PARSER alt_ts_prs1
(start = prsd_start, gettoken = prsd_nexttoken, end = prsd_end, lextypes = prsd_lextype);
CREATE TEXT SEARCH PARSER alt_ts_prs2
(start = prsd_start, gettoken = prsd_nexttoken, end = prsd_end, lextypes = prsd_lextype);
ALTER TEXT SEARCH PARSER alt_ts_prs1 RENAME TO alt_ts_prs2; -- failed (name conflict)
ALTER TEXT SEARCH PARSER alt_ts_prs1 RENAME TO alt_ts_prs3; -- OK
ALTER TEXT SEARCH PARSER alt_ts_prs2 SET SCHEMA alt_nsp2; -- OK
CREATE TEXT SEARCH PARSER alt_ts_prs2
(start = prsd_start, gettoken = prsd_nexttoken, end = prsd_end, lextypes = prsd_lextype);
ALTER TEXT SEARCH PARSER alt_ts_prs2 SET SCHEMA alt_nsp2; -- failed (name conflict)
SELECT nspname, prsname
FROM pg_ts_parser t, pg_namespace n
WHERE t.prsnamespace = n.oid AND nspname like 'alt_nsp%'
ORDER BY nspname, prsname;
---
--- Cleanup resources
---
DROP FOREIGN DATA WRAPPER alt_fdw2 CASCADE;
DROP FOREIGN DATA WRAPPER alt_fdw3 CASCADE;
DROP LANGUAGE alt_lang2 CASCADE;
DROP LANGUAGE alt_lang3 CASCADE;
DROP LANGUAGE alt_lang4 CASCADE;
DROP SCHEMA alt_nsp1 CASCADE;
DROP SCHEMA alt_nsp2 CASCADE;
DROP USER regtest_alter_user1;
DROP USER regtest_alter_user2;
DROP USER regtest_alter_user3;
-- Generated application-dictionary migration (metasfresh/ADempiere style):
-- adds the IsCreateDefaultPOReference flag and the POReferencePattern column
-- for AD_Table_ID=291, plus translations and window fields.  IDs, timestamps
-- and the German description/help strings are data, not comments — do not
-- hand-edit them.  ("URL zum Konzept" = "URL to the concept", a generated
-- placeholder for a link to the design document.)
-- 11.02.2016 16:30
-- URL to the concept
INSERT INTO AD_Element (AD_Client_ID,AD_Element_ID,AD_Org_ID,ColumnName,Created,CreatedBy,Description,EntityType,IsActive,Name,PrintName,Updated,UpdatedBy) VALUES (0,542973,0,'IsCreateDefaultPOReference',TO_TIMESTAMP('2016-02-11 16:30:34','YYYY-MM-DD HH24:MI:SS'),100,'Erlaubt es, bei einem neuen Auftrag automatisch das Refernz-Feld des Auftrag vorzubelegen','de.metas.order','Y','Autom. Referenz-Wert in Auftrag','Autom. Referenz-Wert in Auftrag',TO_TIMESTAMP('2016-02-11 16:30:34','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 11.02.2016 16:30
-- URL to the concept
-- Seed missing translation rows for the new element in every active system language.
INSERT INTO AD_Element_Trl (AD_Language,AD_Element_ID, Description,Help,Name,PO_Description,PO_Help,PO_Name,PO_PrintName,PrintName, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Element_ID, t.Description,t.Help,t.Name,t.PO_Description,t.PO_Help,t.PO_Name,t.PO_PrintName,t.PrintName, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Element t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Element_ID=542973 AND NOT EXISTS (SELECT * FROM AD_Element_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Element_ID=t.AD_Element_ID)
;
-- 11.02.2016 16:30
-- URL to the concept
INSERT INTO AD_Column (AD_Client_ID,AD_Column_ID,AD_Element_ID,AD_Org_ID,AD_Reference_ID,AD_Table_ID,AllowZoomTo,ColumnName,Created,CreatedBy,DDL_NoForeignKey,DefaultValue,Description,EntityType,FieldLength,IsActive,IsAdvancedText,IsAllowLogging,IsAlwaysUpdateable,IsAutocomplete,IsCalculated,IsDimension,IsEncrypted,IsGenericZoomKeyColumn,IsGenericZoomOrigin,IsIdentifier,IsKey,IsLazyLoading,IsMandatory,IsParent,IsSelectionColumn,IsStaleable,IsSyncDatabase,IsTranslated,IsUpdateable,IsUseDocSequence,Name,SeqNo,Updated,UpdatedBy,Version) VALUES (0,553170,542973,0,20,291,'N','IsCreateDefaultPOReference',TO_TIMESTAMP('2016-02-11 16:30:59','YYYY-MM-DD HH24:MI:SS'),100,'N','N','Erlaubt es, bei einem neuen Auftrag automatisch das Refernz-Feld des Auftrag vorzubelegen','de.metas.order',1,'Y','N','Y','N','N','N','N','N','N','N','N','N','N','Y','N','N','N','N','N','Y','N','Autom. Referenz-Wert in Auftrag',0,TO_TIMESTAMP('2016-02-11 16:30:59','YYYY-MM-DD HH24:MI:SS'),100,0)
;
-- 11.02.2016 16:30
-- URL to the concept
INSERT INTO AD_Column_Trl (AD_Language,AD_Column_ID, Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Column_ID, t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Column t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Column_ID=553170 AND NOT EXISTS (SELECT * FROM AD_Column_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Column_ID=t.AD_Column_ID)
;
-- 11.02.2016 20:31
-- URL to the concept
-- Fix the typo ("Refernz" -> "Referenz") in the element description, then
-- propagate the corrected texts to columns, process parameters and fields.
UPDATE AD_Element SET Description='Erlaubt es, bei einem neuen Auftrag automatisch das Referenz-Feld des Auftrags vorzubelegen.',Updated=TO_TIMESTAMP('2016-02-11 20:31:15','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Element_ID=542973
;
-- 11.02.2016 20:31
-- URL to the concept
UPDATE AD_Element_Trl SET IsTranslated='N' WHERE AD_Element_ID=542973
;
-- 11.02.2016 20:31
-- URL to the concept
UPDATE AD_Column SET ColumnName='IsCreateDefaultPOReference', Name='Autom. Referenz-Wert in Auftrag', Description='Erlaubt es, bei einem neuen Auftrag automatisch das Referenz-Feld des Auftrags vorzubelegen.', Help=NULL WHERE AD_Element_ID=542973
;
-- 11.02.2016 20:31
-- URL to the concept
UPDATE AD_Process_Para SET ColumnName='IsCreateDefaultPOReference', Name='Autom. Referenz-Wert in Auftrag', Description='Erlaubt es, bei einem neuen Auftrag automatisch das Referenz-Feld des Auftrags vorzubelegen.', Help=NULL, AD_Element_ID=542973 WHERE UPPER(ColumnName)='ISCREATEDEFAULTPOREFERENCE' AND IsCentrallyMaintained='Y' AND AD_Element_ID IS NULL
;
-- 11.02.2016 20:31
-- URL to the concept
UPDATE AD_Process_Para SET ColumnName='IsCreateDefaultPOReference', Name='Autom. Referenz-Wert in Auftrag', Description='Erlaubt es, bei einem neuen Auftrag automatisch das Referenz-Feld des Auftrags vorzubelegen.', Help=NULL WHERE AD_Element_ID=542973 AND IsCentrallyMaintained='Y'
;
-- 11.02.2016 20:31
-- URL to the concept
UPDATE AD_Field SET Name='Autom. Referenz-Wert in Auftrag', Description='Erlaubt es, bei einem neuen Auftrag automatisch das Referenz-Feld des Auftrags vorzubelegen.', Help=NULL WHERE AD_Column_ID IN (SELECT AD_Column_ID FROM AD_Column WHERE AD_Element_ID=542973) AND IsCentrallyMaintained='Y'
;
-- 11.02.2016 20:35
-- URL to the concept
-- Second element: the pattern used to build the PO reference; the Help text
-- (German HTML, embedded newlines) is part of the string literal.
INSERT INTO AD_Element (AD_Client_ID,AD_Element_ID,AD_Org_ID,ColumnName,Created,CreatedBy,Description,EntityType,Help,IsActive,Name,PrintName,Updated,UpdatedBy) VALUES (0,542974,0,'POReferencePattern',TO_TIMESTAMP('2016-02-11 20:35:18','YYYY-MM-DD HH24:MI:SS'),100,'Der Wert dieses Feldes wird mit der Auftrags-Belegnummer kombiniert, um die Auftragsreferenz zu erzeugen','de.metas.order','Beispiel:
<ul>
<li>Vorlage: 00600000000</li>
<li>Auftragsnumer: 12345</li>
<li>Erzeugte Referenz: 00600012345</li>
</ul>','Y','Auftrag Referenz-Wert Vorlage','Auftrag Referenz-Wert Vorlage',TO_TIMESTAMP('2016-02-11 20:35:18','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 11.02.2016 20:35
-- URL to the concept
INSERT INTO AD_Element_Trl (AD_Language,AD_Element_ID, Description,Help,Name,PO_Description,PO_Help,PO_Name,PO_PrintName,PrintName, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Element_ID, t.Description,t.Help,t.Name,t.PO_Description,t.PO_Help,t.PO_Name,t.PO_PrintName,t.PrintName, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Element t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Element_ID=542974 AND NOT EXISTS (SELECT * FROM AD_Element_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Element_ID=t.AD_Element_ID)
;
-- 11.02.2016 20:36
-- URL to the concept
INSERT INTO AD_Column (AD_Client_ID,AD_Column_ID,AD_Element_ID,AD_Org_ID,AD_Reference_ID,AD_Table_ID,AllowZoomTo,ColumnName,Created,CreatedBy,DDL_NoForeignKey,Description,EntityType,FieldLength,Help,IsActive,IsAdvancedText,IsAllowLogging,IsAlwaysUpdateable,IsAutocomplete,IsCalculated,IsDimension,IsEncrypted,IsGenericZoomKeyColumn,IsGenericZoomOrigin,IsIdentifier,IsKey,IsLazyLoading,IsMandatory,IsParent,IsSelectionColumn,IsStaleable,IsSyncDatabase,IsTranslated,IsUpdateable,IsUseDocSequence,Name,SeqNo,Updated,UpdatedBy,Version) VALUES (0,553171,542974,0,10,291,'N','POReferencePattern',TO_TIMESTAMP('2016-02-11 20:36:33','YYYY-MM-DD HH24:MI:SS'),100,'N','Der Wert dieses Feldes wird mit der Auftrags-Belegnummer kombiniert, um die Auftragsreferenz zu erzeugen','de.metas.order',40,'Beispiel:
<ul>
<li>Vorlage: 00600000000</li>
<li>Auftragsnumer: 12345</li>
<li>Erzeugte Referenz: 00600012345</li>
</ul>','Y','N','Y','N','N','N','N','N','N','N','N','N','N','N','N','N','N','N','N','Y','N','Auftrag Referenz-Wert Vorlage',0,TO_TIMESTAMP('2016-02-11 20:36:33','YYYY-MM-DD HH24:MI:SS'),100,0)
;
-- 11.02.2016 20:36
-- URL to the concept
INSERT INTO AD_Column_Trl (AD_Language,AD_Column_ID, Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Column_ID, t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Column t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Column_ID=553171 AND NOT EXISTS (SELECT * FROM AD_Column_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Column_ID=t.AD_Column_ID)
;
-- 16.02.2016 07:13
-- URL to the concept
-- Reposition an existing field; NOTE(review): these edits are dated later
-- than the field inserts below but appear first in the file — the generator
-- emits them in recording order, not chronological order.
UPDATE AD_Field SET SeqNo=123, SeqNoGrid=113,Updated=TO_TIMESTAMP('2016-02-16 07:13:22','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Field_ID=2124
;
-- 16.02.2016 07:14
-- URL to the concept
UPDATE AD_Field SET IsSameLine='Y',Updated=TO_TIMESTAMP('2016-02-16 07:14:11','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Field_ID=553178
;
-- 12.02.2016 20:47
-- URL to the concept
INSERT INTO AD_Field (AD_Client_ID,AD_Column_ID,AD_Field_ID,AD_Org_ID,AD_Tab_ID,ColumnDisplayLength,Created,CreatedBy,Description,DisplayLength,EntityType,IncludedTabHeight,IsActive,IsCentrallyMaintained,IsDisplayed,IsDisplayedGrid,IsEncrypted,IsFieldOnly,IsHeading,IsReadOnly,IsSameLine,Name,SeqNo,SeqNoGrid,SortNo,SpanX,SpanY,Updated,UpdatedBy) VALUES (0,553170,556620,0,220,0,TO_TIMESTAMP('2016-02-12 20:47:40','YYYY-MM-DD HH24:MI:SS'),100,'Erlaubt es, bei einem neuen Auftrag automatisch das Referenz-Feld des Auftrags vorzubelegen.',0,'de.metas.order',0,'Y','Y','Y','Y','N','N','N','N','N','Autom. Referenz-Wert in Auftrag',163,173,0,1,1,TO_TIMESTAMP('2016-02-12 20:47:40','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 12.02.2016 20:47
-- URL to the concept
INSERT INTO AD_Field_Trl (AD_Language,AD_Field_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Field_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Field t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Field_ID=556620 AND NOT EXISTS (SELECT * FROM AD_Field_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Field_ID=t.AD_Field_ID)
;
-- 12.02.2016 20:48
-- URL to the concept
-- The pattern field is only shown when the flag is set (DisplayLogic).
INSERT INTO AD_Field (AD_Client_ID,AD_Column_ID,AD_Field_ID,AD_Org_ID,AD_Tab_ID,ColumnDisplayLength,Created,CreatedBy,Description,DisplayLength,DisplayLogic,EntityType,Help,IncludedTabHeight,IsActive,IsCentrallyMaintained,IsDisplayed,IsDisplayedGrid,IsEncrypted,IsFieldOnly,IsHeading,IsReadOnly,IsSameLine,Name,SeqNo,SeqNoGrid,SortNo,SpanX,SpanY,Updated,UpdatedBy) VALUES (0,553171,556621,0,220,0,TO_TIMESTAMP('2016-02-12 20:48:36','YYYY-MM-DD HH24:MI:SS'),100,'Der Wert dieses Feldes wird mit der Auftrags-Belegnummer kombiniert, um die Auftragsreferenz zu erzeugen',0,'@IsCreateDefaultPOReference@=''Y''','de.metas.order','Beispiel:
<ul>
<li>Vorlage: 00600000000</li>
<li>Auftragsnumer: 12345</li>
<li>Erzeugte Referenz: 00600012345</li>
</ul>',0,'Y','Y','Y','Y','N','N','N','N','Y','Auftrag Referenz-Wert Vorlage',165,175,0,1,1,TO_TIMESTAMP('2016-02-12 20:48:36','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 12.02.2016 20:48
-- URL to the concept
INSERT INTO AD_Field_Trl (AD_Language,AD_Field_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Field_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Field t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Field_ID=556621 AND NOT EXISTS (SELECT * FROM AD_Field_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Field_ID=t.AD_Field_ID)
;
-- Pluscode implementation for PostgreSQL
--
--
-- Licensed under the Apache License, Version 2.0 (the 'License');
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an 'AS IS' BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
--
--
-- pluscode_cliplatitude ####
-- Clip latitude between -90 and 90 degrees.
-- PARAMETERS
-- lat numeric // latitude to use for the reference location
-- RETURNS
-- numeric // lat constrained to the closed interval [-90, 90]
-- EXAMPLE
-- select pluscode_cliplatitude(149.18);
CREATE OR REPLACE FUNCTION public.pluscode_cliplatitude(
    lat numeric)
RETURNS numeric
LANGUAGE 'plpgsql'
COST 100
IMMUTABLE
AS $BODY$
BEGIN
    -- Clamp in one expression: values below -90 become -90,
    -- values above 90 become 90, everything else passes through.
    RETURN LEAST(90, GREATEST(-90, lat));
END;
$BODY$;
-- pluscode_normalizelongitude ####
-- Normalize a longitude between -180 and 180 degrees (180 excluded).
-- PARAMETERS
-- lng numeric // longitude to use for the reference location
-- RETURNS
-- numeric // lng wrapped into the half-open interval [-180, 180)
-- EXAMPLE
-- select pluscode_normalizelongitude(188.18);
CREATE OR REPLACE FUNCTION public.pluscode_normalizelongitude(
    lng numeric)
RETURNS numeric
LANGUAGE 'plpgsql'
COST 100
IMMUTABLE
AS $BODY$
BEGIN
    -- Closed-form wrap equivalent to repeatedly adding/subtracting 360:
    -- shift into [0, 360) (the inner mod can be negative, hence the
    -- "+ 360, mod again" step), then shift back to [-180, 180).
    RETURN mod(mod(lng + 180, 360) + 360, 360) - 180;
END;
$BODY$;
-- pluscode_isvalid ####
-- Check if the code is valid
-- PARAMETERS
-- code text // a pluscode
-- RETURNS
-- boolean // TRUE when the code satisfies the Open Location Code syntax rules
-- EXAMPLE
-- select pluscode_isvalid('XX5JJC23+00');
CREATE OR REPLACE FUNCTION public.pluscode_isvalid(
code text)
RETURNS boolean
LANGUAGE 'plpgsql'
COST 100
IMMUTABLE
AS $BODY$
DECLARE
separator_ text := '+';
-- Maximum 0-based index of the '+' separator (it may appear at even
-- positions up to the 8th character).
separator_position int := 8;
padding_char text:= '0';
-- Number of distinct '0'-runs found in the code (only one run is legal).
padding_int_pos integer:=0;
-- Length of the first '0'-run (must be even).
padding_one_int_pos integer:=0;
-- The code with all padding and separator characters removed; what
-- remains must consist solely of the 20-character OLC alphabet.
stripped_code text := replace(replace(code,'0',''),'+','');
code_alphabet_ text := '23456789CFGHJMPQRVWX';
-- NOTE(review): idx is declared but never used.
idx int := 1;
BEGIN
code := code::text;
--Code Without "+" char
IF (POSITION(separator_ in code) = 0) THEN
RETURN FALSE;
END IF;
--Code beginning with "+" char
IF (POSITION(separator_ in code) = 1) THEN
RETURN FALSE;
END IF;
--Code with illegal position separator
-- The separator must sit at an even 0-based index no greater than 8
-- (POSITION is 1-based, hence the +1 / -1 adjustments).
IF ( (POSITION(separator_ in code) > separator_position+1) OR ((POSITION(separator_ in code)-1) % 2 = 1) ) THEN
RETURN FALSE;
END IF;
--Code contains padding characters "0"
IF (POSITION(padding_char in code) > 0) THEN
IF (POSITION(separator_ in code) < 9) THEN
RETURN FALSE;
END IF;
IF (POSITION(separator_ in code) = 1) THEN
RETURN FALSE;
END IF;
--Check if there are many "00" groups (only one is legal)
-- ROW_NUMBER over the set returned by REGEXP_MATCHES counts the
-- '0'-runs; taking the highest row number gives the run count.
padding_int_pos := (select ROW_NUMBER() OVER( ORDER BY REGEXP_MATCHES(code,'('||padding_char||'+)' ,'g') ) order by 1 DESC limit 1);
padding_one_int_pos := char_length( (select REGEXP_MATCHES(code,'('||padding_char||'+)' ,'g') limit 1)[1] );
IF (padding_int_pos > 1 ) THEN
RETURN FALSE;
END IF;
--Check if the first group is % 2 = 0
IF ((padding_one_int_pos % 2) = 1 ) THEN
RETURN FALSE;
END IF;
--Lastchar is a separator
-- A padded code must end with '+' (nothing may follow the padding).
IF (RIGHT(code,1) <> separator_) THEN
RETURN FALSE;
END IF;
END IF;
--If there is just one char after '+'
IF (char_length(code) - POSITION(separator_ in code) = 1 ) THEN
RETURN FALSE;
END IF;
--Check if each char is in code_alphabet_
FOR i IN 1..char_length(stripped_code) LOOP
IF (POSITION( UPPER(substring(stripped_code from i for 1)) in code_alphabet_ ) = 0) THEN
RETURN FALSE;
END IF;
END LOOP;
RETURN TRUE;
END;
$BODY$;
-- pluscode_codearea ####
-- Coordinates of a decoded pluscode.
-- PARAMETERS
-- latitudelo numeric // lattitude low of the pluscode
-- longitudelo numeric // longitude low of the pluscode
-- latitudehi numeric // lattitude high of the pluscode
-- longitudehi numeric // longitude high of the pluscode
-- codelength integer // length of the pluscode
-- RETURNS
-- one row: the bounding box, the code length, and the box center
-- (midpoint of each axis, capped at 90 lat / 180 lng as in the
-- reference Open Location Code implementation).
-- EXAMPLE
-- select pluscode_codearea(49.1805,-0.378625,49.180625,-0.3785,10::int);
CREATE OR REPLACE FUNCTION public.pluscode_codearea(
    latitudelo numeric,
    longitudelo numeric,
    latitudehi numeric,
    longitudehi numeric,
    codelength integer)
RETURNS TABLE(lat_lo numeric, lng_lo numeric, lat_hi numeric, lng_hi numeric, code_length numeric, lat_center numeric, lng_center numeric)
LANGUAGE 'plpgsql'
COST 100
IMMUTABLE
ROWS 1000
AS $BODY$
DECLARE
    rlatitudeLo numeric:= latitudeLo;
    rlongitudeLo numeric:= longitudeLo;
    rlatitudeHi numeric:= latitudeHi;
    rlongitudeHi numeric:= longitudeHi;
    rcodeLength numeric:= codeLength;
    rlatitudeCenter numeric:= 0;
    rlongitudeCenter numeric:= 0;
    latitude_max_ int:= 90;
    longitude_max_ int:= 180;
BEGIN
    -- Center = min(lo + (hi - lo) / 2, max), per the reference OLC
    -- implementation.
    -- BUG FIX: the previous guard evaluated ((lo + (hi - lo)) / 2),
    -- which reduces to hi / 2 — not the midpoint assigned below — so the
    -- cap at the pole/antimeridian could never trigger.  Compute the
    -- midpoint once and cap that value instead.
    rlatitudeCenter := latitudeLo + (latitudeHi - latitudeLo) / 2;
    IF (rlatitudeCenter > latitude_max_) THEN
        rlatitudeCenter := latitude_max_;
    END IF;
    rlongitudeCenter := longitudeLo + (longitudeHi - longitudeLo) / 2;
    IF (rlongitudeCenter > longitude_max_) THEN
        rlongitudeCenter := longitude_max_;
    END IF;
    -- The double cast numeric -> double precision -> numeric trims
    -- trailing zeros from the displayed value (kept from the original).
    RETURN QUERY SELECT
        rlatitudeLo::double precision::numeric as lat_lo,
        rlongitudeLo::double precision::numeric as lng_lo,
        rlatitudeHi::double precision::numeric as lat_hi,
        rlongitudeHi::double precision::numeric as lng_hi,
        rcodeLength as code_length,
        rlatitudeCenter::double precision::numeric,
        rlongitudeCenter::double precision::numeric;
END;
$BODY$;
-- pluscode_isshort ####
-- Check if the code is a short version of a pluscode
-- PARAMETERS
-- code text // a valid pluscode
-- EXAMPLE
-- select pluscode_isshort('XX5JJC+');
CREATE OR REPLACE FUNCTION public.pluscode_isshort(
code text)
RETURNS boolean
LANGUAGE 'plpgsql'
COST 100
IMMUTABLE
AS $BODY$
DECLARE
separator_ text := '+';
separator_position int := 9;
sep_pos int;
BEGIN
--a short code must still be a structurally valid pluscode
IF (pluscode_isvalid(code)) is FALSE THEN
RETURN FALSE;
END IF;
--short codes carry the '+' before the full-code separator slot (position 9)
sep_pos := POSITION(separator_ in code);
RETURN (sep_pos > 0) AND (sep_pos < separator_position);
END;
$BODY$;
-- pluscode_isfull ####
-- Is the codeplus a full code
-- PARAMETERS
-- code text // codeplus
-- EXAMPLE
-- select pluscode_isfull('cccccc+')
CREATE OR REPLACE FUNCTION public.pluscode_isfull(
code text)
RETURNS boolean
LANGUAGE 'plpgsql'
COST 100
IMMUTABLE
AS $BODY$
DECLARE
alphabet_ text := '23456789CFGHJMPQRVWX';
base_ int := char_length(alphabet_);
lat_max_ int := 90;
lng_max_ int := 180;
lat_digit_val int;
lng_digit_val int;
BEGIN
--must be a valid pluscode at all
IF (pluscode_isvalid(code)) is FALSE THEN
RETURN FALSE;
END IF;
--a short code is by definition not a full one
IF (pluscode_isshort(code)) is TRUE THEN
RETURN FALSE;
END IF;
--first digit encodes latitude; its decoded value must stay below 2 * 90
lat_digit_val := (POSITION( UPPER(LEFT(code,1)) IN alphabet_ )-1) * base_;
IF (lat_digit_val >= lat_max_ * 2) THEN
RETURN FALSE;
END IF;
--second digit (when present) encodes longitude; value must stay below 2 * 180
IF (char_length(code) > 1) THEN
lng_digit_val := (POSITION( UPPER(SUBSTRING(code FROM 2 FOR 1)) IN alphabet_)-1) * base_;
IF (lng_digit_val >= lng_max_ * 2) THEN
RETURN FALSE;
END IF;
END IF;
RETURN TRUE;
END;
$BODY$;
-- pluscode_encode ####
-- Encode lat lng to get pluscode
-- PARAMETERS
-- _lat numeric // latitude ref
-- _lng numeric // longitude ref
-- _codelength int// How long must be the pluscode
-- RAISES an exception for an invalid code length or out-of-range lat/lng.
-- EXAMPLE
-- select pluscode_encode(49.05,-0.108,12);
CREATE OR REPLACE FUNCTION public.pluscode_encode(
_lat numeric,
_lng numeric,
_codelength integer DEFAULT 10)
RETURNS text
LANGUAGE 'plpgsql'
COST 100
IMMUTABLE
AS $BODY$
DECLARE
code text DEFAULT '';
code_alphabet text := '23456789CFGHJMPQRVWX';
sum_lat_tosubstract numeric;
sum_lng_tosubstract numeric;
classic_code int := 10;
precision_up int := 0;
--degree size of one digit pair at each of the 5 refinement levels
digit_sub FLOAT ARRAY DEFAULT ARRAY[20.0, 1.0, 0.05, 0.0025, 0.000125];
code_11_digit text default '';
latPlaceValue numeric;
lngPlaceValue numeric;
latitude numeric;
longitude numeric;
adjust_lat numeric;
adjust_lng numeric;
_row numeric;
_col numeric;
--grid refinement beyond 10 digits uses a 5-row x 4-column grid per digit
nb_rows int default 5;
nb_cols int default 4;
--NOTE(review): _isvalid_params is declared but never used
_isvalid_params boolean default false;
BEGIN
--valid lengths: 2, 4, 6, 8, or anything >= 10
IF (_codelength < 2 OR (_codelength < 10 AND (_codelength % 2 = 1))) THEN
RAISE EXCEPTION 'OLCode is not valid --> %', _codelength
USING HINT = 'Use an int in this array [2,4,6,8,10,10+]';
END IF;
IF (_lat>90) OR (_lat<-90) THEN
RAISE EXCEPTION 'Latitude limit excedeed --> %', _lat
USING HINT = 'Use a value between -90 and 90';
END IF;
IF (_lng>180) OR (_lng<-180) THEN
RAISE EXCEPTION 'Longitude limit excedeed --> %', _lng
USING HINT = 'Use a value between -180 and 180';
END IF;
--calculate precision (number of grid digits past the classic 10-digit code)
precision_up := _codelength - classic_code;
--block1 for 2 digits get the first couple of chars
--each pair: pick the alphabet char for the current 20-degree (then finer) cell,
--and accumulate the degrees already consumed so the next level works on the remainder
code = code || substring(code_alphabet from floor((_lat+90)/digit_sub[1])::int + 1 for 1);
sum_lat_tosubstract := (floor((_lat+90)/digit_sub[1])::int ) * digit_sub[1];
code = code || substring(code_alphabet from floor((_lng+180)/digit_sub[1])::int + 1 for 1);
sum_lng_tosubstract := (floor((_lng+180)/digit_sub[1])::int) * digit_sub[1];
--block2 for 4 digits get the second couple of chars
IF (_codelength > 3) THEN
code = code || substring(code_alphabet from floor(((_lat+90)-sum_lat_tosubstract)/digit_sub[2])::int + 1 for 1);
sum_lat_tosubstract = sum_lat_tosubstract + (floor(((_lat+90)-sum_lat_tosubstract)/digit_sub[2])) * digit_sub[2];
code = code || substring(code_alphabet from floor(((_lng+180)-sum_lng_tosubstract)/digit_sub[2])::int + 1 for 1);
sum_lng_tosubstract = sum_lng_tosubstract + (floor(((_lng+180)-sum_lng_tosubstract)/digit_sub[2])) * digit_sub[2];
--shorter codes are padded with '00' in place of the unused pair
ELSE code = code||'00';
END IF;
--block3 for 6 digits get the third couple of chars
IF (_codelength > 5) THEN
code = code || substring(code_alphabet from floor(((_lat+90)-sum_lat_tosubstract)/digit_sub[3])::int + 1 for 1);
sum_lat_tosubstract = sum_lat_tosubstract + (floor(((_lat+90)-sum_lat_tosubstract)/digit_sub[3])) * digit_sub[3];
code = code || substring(code_alphabet from floor(((_lng+180)-sum_lng_tosubstract)/digit_sub[3])::int + 1 for 1);
sum_lng_tosubstract = sum_lng_tosubstract + (floor(((_lng+180)-sum_lng_tosubstract)/digit_sub[3])) * digit_sub[3];
ELSE code = code||'00';
END IF;
--block4 for 8 digits get the fourth couple of chars
IF (_codelength > 7) THEN
code = code || substring(code_alphabet from floor(((_lat+90)-sum_lat_tosubstract)/digit_sub[4])::int + 1 for 1);
sum_lat_tosubstract = sum_lat_tosubstract + (floor(((_lat+90)-sum_lat_tosubstract)/digit_sub[4])) * digit_sub[4];
code = code || substring(code_alphabet from floor(((_lng+180)-sum_lng_tosubstract)/digit_sub[4])::int + 1 for 1);
sum_lng_tosubstract = sum_lng_tosubstract + (floor(((_lng+180)-sum_lng_tosubstract)/digit_sub[4])) * digit_sub[4];
ELSE code = code||'00';
END IF;
--the separator always sits after the 8th digit
code=code||'+';
--block5 for 10 digits get the fifth couple of chars
IF (_codelength > 9) THEN
code = code || substring(code_alphabet from floor(((_lat+90)-sum_lat_tosubstract)/digit_sub[5])::int + 1 for 1);
sum_lat_tosubstract = sum_lat_tosubstract + (floor(((_lat+90)-sum_lat_tosubstract)/digit_sub[5])) * digit_sub[5];
code = code || substring(code_alphabet from floor(((_lng+180)-sum_lng_tosubstract)/digit_sub[5])::int + 1 for 1);
sum_lng_tosubstract = sum_lng_tosubstract + (floor(((_lng+180)-sum_lng_tosubstract)/digit_sub[5])) * digit_sub[5];
END IF;
--after 10 digits
IF precision_up > 0 THEN
code_11_digit = '';
--0.000125 degrees is the cell size of the last classic pair (digit_sub[5])
latPlaceValue := 0.000125;
lngPlaceValue := 0.000125;
--delete degrees for lat and lng
latitude := _lat::numeric % 1.0::numeric;
longitude := _lng::numeric % 1.0::numeric;
adjust_lat := latitude::numeric % latPlaceValue::numeric;
adjust_lng := longitude::numeric % lngPlaceValue::numeric;
--loop for precision > 10
--use a grid 5*4
--each extra digit encodes one (row, col) cell of the 5x4 grid; the cell index
--row * 4 + col selects the alphabet character
FOR it IN 1..precision_up LOOP
_row = floor(adjust_lat / ( latPlaceValue / nb_rows));
_col = floor(adjust_lng / ( lngPlaceValue / nb_cols));
latPlaceValue = latPlaceValue / nb_rows;
lngPlaceValue = lngPlaceValue / nb_cols;
adjust_lat = adjust_lat - (_row * latPlaceValue);
adjust_lng = adjust_lng - (_col * lngPlaceValue);
code_11_digit = code_11_digit || substring(code_alphabet from ((_row * nb_cols + _col))::int + 1 for 1);
END LOOP;
END IF;
RETURN code||code_11_digit ;
END;
$BODY$;
-- pluscode_decode ####
-- Decode a pluscode to get the corresponding bounding box and the center
-- PARAMETERS
-- code text// the pluscode to decode
-- RETURNS one row: bounding box (lat/lng lo and hi), code length and center.
-- RAISES an exception when the code is not a valid full pluscode.
-- EXAMPLE
-- select pluscode_decode('CCCCCCCC+');
CREATE OR REPLACE FUNCTION public.pluscode_decode(
code text)
RETURNS TABLE(lat_lo numeric, lng_lo numeric, lat_hi numeric, lng_hi numeric, code_length numeric, lat_center numeric, lng_center numeric)
LANGUAGE 'plpgsql'
COST 100
IMMUTABLE
ROWS 1000
AS $BODY$
DECLARE
lat_out float := 0;
lng_out float := 0;
latitude_max_ int := 90;
longitude_max_ int := 180;
lat_precision numeric := 0;
lng_precision numeric:= 0;
code_alphabet text := '23456789CFGHJMPQRVWX';
--code with padding '0' and separator '+' removed
stripped_code text := UPPER(replace(replace(code,'0',''),'+',''));
encoding_base_ int := char_length(code_alphabet);
--integer scale used for the 10 "pair" digits: base^3
pair_precision_ numeric := power(encoding_base_::double precision, 3::double precision);
normal_lat numeric:= -latitude_max_ * pair_precision_;
normal_lng numeric:= -longitude_max_ * pair_precision_;
grid_lat_ numeric:= 0;
grid_lng_ numeric:= 0;
max_digit_count_ int:= 15;
pair_code_length_ int:=10;
digits int:= 0;
pair_first_place_value_ numeric:= power(encoding_base_, (pair_code_length_/2)-1);
pv int:= 0;
iterator int:=0;
iterator_d int:=0;
digit_val int := 0;
row_ numeric := 0;
col_ numeric := 0;
return_record record;
grid_code_length_ int:= max_digit_count_ - pair_code_length_;
grid_columns_ int := 4;
grid_rows_ int := 5;
--place value of the FIRST grid digit (rows: 5^4, cols: 4^4)
grid_lat_first_place_value_ int := power(grid_rows_, (grid_code_length_ - 1));
grid_lng_first_place_value_ int := power(grid_columns_, (grid_code_length_ - 1));
final_lat_precision_ numeric := pair_precision_ * power(grid_rows_, (max_digit_count_ - pair_code_length_));
final_lng_precision_ numeric := pair_precision_ * power(grid_columns_, (max_digit_count_ - pair_code_length_));
rowpv numeric := 0;
colpv numeric := 0;
BEGIN
IF (pluscode_isfull(code)) is FALSE THEN
RAISE EXCEPTION 'NOT A VALID FULL CODE: %', code;
END IF;
--strip 0 and + chars
code:= stripped_code;
normal_lat := -latitude_max_ * pair_precision_;
normal_lng := -longitude_max_ * pair_precision_;
--how many digits must be used
IF (char_length(code) > pair_code_length_) THEN
digits := pair_code_length_;
ELSE
digits := char_length(code);
END IF;
pv := pair_first_place_value_;
--pair digits: even positions refine latitude, odd positions refine longitude,
--each pair dividing the place value by the encoding base
WHILE iterator < digits
LOOP
normal_lat := normal_lat + (POSITION( SUBSTRING(code FROM iterator+1 FOR 1) IN code_alphabet)-1 )* pv;
normal_lng := normal_lng + (POSITION( SUBSTRING(code FROM iterator+1+1 FOR 1) IN code_alphabet)-1 ) * pv;
IF (iterator < (digits -2)) THEN
pv := pv/encoding_base_;
END IF;
iterator := iterator + 2;
END LOOP;
--convert values to degrees
lat_precision := pv/ pair_precision_;
lng_precision := pv/ pair_precision_;
--grid digits (positions 11..15) refine within a 5x4 grid per digit
IF (char_length(code) > pair_code_length_) THEN
IF (char_length(code) > max_digit_count_) THEN
digits := max_digit_count_;
ELSE
digits := char_length(code);
END IF;
iterator_d := pair_code_length_;
--BUGFIX: start the grid place values at their first-place magnitude.
--They were previously left at their declared value of 0, so grid digits
--contributed nothing and the precision collapsed to 0 for codes > 10 digits.
rowpv := grid_lat_first_place_value_;
colpv := grid_lng_first_place_value_;
WHILE iterator_d < digits
LOOP
digit_val := (POSITION( SUBSTRING(code FROM iterator_d+1 FOR 1) IN code_alphabet)-1);
--integer division already floors for non-negative values; ceil is a no-op here
row_ := ceil(digit_val/grid_columns_);
col_ := digit_val % grid_columns_;
grid_lat_ := grid_lat_ +(row_*rowpv);
grid_lng_ := grid_lng_ +(col_*colpv);
IF ( iterator_d < (digits -1) ) THEN
rowpv := rowpv / grid_rows_;
colpv := colpv / grid_columns_;
END IF;
iterator_d := iterator_d + 1;
END LOOP;
--adjust precision
lat_precision := rowpv / final_lat_precision_;
lng_precision := colpv / final_lng_precision_;
END IF;
--merge the normal and extra precision of the code
lat_out := normal_lat / pair_precision_ + grid_lat_ / final_lat_precision_;
lng_out := normal_lng / pair_precision_ + grid_lng_ / final_lng_precision_;
IF (char_length(code) > max_digit_count_ ) THEN
digits := max_digit_count_;
RAISE NOTICE 'lat_out max_digit_count_ %', lat_out;
ELSE
digits := char_length(code);
RAISE NOTICE 'digits char_length%', digits;
END IF ;
--delegate the bounding-box/center packaging to pluscode_codearea
return_record := pluscode_codearea(
lat_out::numeric,
lng_out::numeric,
(lat_out+lat_precision)::numeric,
(lng_out+lng_precision)::numeric,
digits::int
);
RETURN QUERY SELECT
return_record.lat_lo,
return_record.lng_lo,
return_record.lat_hi,
return_record.lng_hi,
return_record.code_length,
return_record.lat_center,
return_record.lng_center
;
END;
$BODY$;
-- pluscode_shorten ####
-- Remove characters from the start of an OLC code.
-- PARAMETERS
-- code text //full code
-- latitude numeric //latitude to use for the reference location
-- longitude numeric //longitude to use for the reference location
-- RETURNS the shortened code when the reference point is close enough,
-- otherwise the (uppercased) original code.
-- EXAMPLE
-- select pluscode_shorten('8CXX5JJC+6H6H6H',49.18,-0.37);
CREATE OR REPLACE FUNCTION public.pluscode_shorten(
code text,
latitude numeric,
longitude numeric)
RETURNS text
LANGUAGE 'plpgsql'
COST 100
IMMUTABLE
AS $BODY$
DECLARE
padding_character text :='0';
code_area record;
min_trimmable_code_len int:= 6;
range_ numeric:= 0;
lat_dif numeric:= 0;
lng_dif numeric:= 0;
--degree resolution of each digit-pair level (same table as pluscode_encode)
pair_resolutions_ FLOAT[] := ARRAY[20.0, 1.0, 0.05, 0.0025, 0.000125]::FLOAT[];
iterator int:= 0;
BEGIN
--only full, unpadded codes of more than 6 digits can be shortened
IF (pluscode_isfull(code)) is FALSE THEN
RAISE EXCEPTION 'Code is not full and valid: %', code;
END IF;
IF (POSITION(padding_character IN code) > 0) THEN
RAISE EXCEPTION 'Code contains 0 character(s), not valid : %', code;
END IF;
code := UPPER(code);
--decode to get the code's center point
code_area := pluscode_decode(code);
IF (code_area.code_length < min_trimmable_code_len ) THEN
RAISE EXCEPTION 'Code must contain more than 6 character(s) : %',code;
END IF;
--Are the latitude and longitude valid
--NOTE(review): parameters are declared numeric, so pg_typeof always returns
--'numeric' and this branch appears unreachable; kept as defensive dead code.
IF (pg_typeof(latitude) NOT IN ('numeric','real','double precision','integer','bigint','float')) OR (pg_typeof(longitude) NOT IN ('numeric','real','double precision','integer','bigint','float')) THEN
RAISE EXCEPTION 'LAT || LNG are not numbers % !',pg_typeof(latitude)||' || '||pg_typeof(longitude);
END IF;
--clamp/normalize the reference point into valid ranges
latitude := pluscode_clipLatitude(latitude);
longitude := pluscode_normalizelongitude(longitude);
lat_dif := ABS(code_area.lat_center - latitude);
lng_dif := ABS(code_area.lng_center - longitude);
--calculate max distance with the center
IF (lat_dif > lng_dif) THEN
range_ := lat_dif;
ELSE
range_ := lng_dif;
END IF;
--walk resolutions from coarse to fine (skip the last entry) and drop the
--leading digit pairs whose cell still safely contains the reference point
iterator := ARRAY_LENGTH( pair_resolutions_, 1)-2;
WHILE ( iterator >= 1 )
LOOP
--is it close enough to shortent the code ?
--use 0.3 for safety instead of 0.5
IF ( range_ < (pair_resolutions_[ iterator ]*0.3) ) THEN
--keep the code from after the trimmed pairs onwards
RETURN SUBSTRING( code , ((iterator+1)*2)-1 );
END IF;
iterator := iterator - 1;
END LOOP;
--nothing trimmable: return the full (uppercased) code
RETURN code;
END;
$BODY$;
-- pluscode_recovernearest ####
-- Retrieve a valid full code (the nearest from lat/lng).
-- PARAMETERS
-- short_code text // a valid shortcode
-- reference_latitude numeric // a valid latitude
-- reference_longitude numeric // a valid longitude
-- RETURNS the recovered full pluscode nearest to the reference point.
-- RAISES an exception when short_code is neither a short nor a full code.
-- EXAMPLE
-- select pluscode_recovernearest('XX5JJC+', 49.1805,-0.3786);
CREATE OR REPLACE FUNCTION public.pluscode_recovernearest(
short_code text,
reference_latitude numeric,
reference_longitude numeric)
RETURNS text
LANGUAGE 'plpgsql'
COST 100
IMMUTABLE
AS $BODY$
DECLARE
padding_length int :=0;
separator_position_ int := 8;
separator_ text := '+';
resolution int := 0;
half_resolution numeric := 0;
code_area record;
latitude_max int := 90;
code_out text := '';
BEGIN
--a full code needs no recovery; anything else must at least be a short code
IF (pluscode_isshort(short_code)) is FALSE THEN
IF (pluscode_isfull(short_code)) THEN
RETURN UPPER(short_code);
ELSE
RAISE EXCEPTION 'Short code is not valid: %', short_code;
END IF;
--BUGFIX: removed an unreachable RAISE here that referenced the undefined
--variable "code" (both branches above already RETURN or RAISE).
END IF;
--Are the latitude and longitude valid
--NOTE(review): parameters are declared numeric, so this branch appears
--unreachable; kept as defensive dead code, mirroring pluscode_shorten.
IF (pg_typeof(reference_latitude) NOT IN ('numeric','real','double precision','integer','bigint','float')) OR (pg_typeof(reference_longitude) NOT IN ('numeric','real','double precision','integer','bigint','float')) THEN
--BUGFIX: the message previously referenced the undefined variables
--"latitude"/"longitude" instead of the actual parameters.
RAISE EXCEPTION 'LAT || LNG are not numbers % !',pg_typeof(reference_latitude)||' || '||pg_typeof(reference_longitude);
END IF;
reference_latitude := pluscode_clipLatitude(reference_latitude);
reference_longitude := pluscode_normalizeLongitude(reference_longitude);
short_code := UPPER(short_code);
-- Calculate the number of digits to recover.
padding_length := separator_position_ - POSITION(separator_ in short_code)+1;
-- Calculate the resolution of the padded area in degrees.
resolution := power(20, 2 - (padding_length / 2));
-- Half resolution for difference with the center
half_resolution := resolution / 2.0;
-- Concatenate short_code and the calculated value --> encode(lat,lng)
code_area := pluscode_decode(SUBSTRING(pluscode_encode(reference_latitude::numeric, reference_longitude::numeric) , 1 , padding_length) || short_code);
--Check if difference with the center is more than half_resolution
--Keep value between -90 and 90
IF (((reference_latitude + half_resolution) < code_area.lat_center) AND ((code_area.lat_center - resolution) >= -latitude_max)) THEN
code_area.lat_center := code_area.lat_center - resolution;
ELSIF (((reference_latitude - half_resolution) > code_area.lat_center) AND ((code_area.lat_center + resolution) <= latitude_max)) THEN
code_area.lat_center := code_area.lat_center + resolution;
END IF;
-- difference with the longitude reference
IF (reference_longitude + half_resolution < code_area.lng_center ) THEN
code_area.lng_center := code_area.lng_center - resolution;
ELSIF (reference_longitude - half_resolution > code_area.lng_center) THEN
code_area.lng_center := code_area.lng_center + resolution;
END IF;
--re-encode the adjusted center at the decoded code's length
code_out := pluscode_encode(code_area.lat_center::numeric, code_area.lng_center::numeric, code_area.code_length::integer);
RETURN code_out;
END;
$BODY$;
-- 28.01.2016 15:03
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process (AccessLevel,AD_Client_ID,AD_Org_ID,AD_Process_ID,Classname,CopyFromProcess,Created,CreatedBy,EntityType,IsActive,IsBetaFunctionality,IsDirectPrint,IsOneInstanceOnly,IsReport,IsServerProcess,LockWaitTimeout,Name,RefreshAllAfterExecution,ShowHelp,Statistic_Count,Statistic_Seconds,Type,Updated,UpdatedBy,Value) VALUES ('3',0,0,540648,'de.metas.acct.process.Fact_Acct_ActivityChangeRequest_Populate','N',TO_TIMESTAMP('2016-01-28 15:03:18','YYYY-MM-DD HH24:MI:SS'),100,'de.metas.acct','Y','N','N','N','N','N',0,'Populate activity change requests','N','Y',0,0,'Java',TO_TIMESTAMP('2016-01-28 15:03:18','YYYY-MM-DD HH24:MI:SS'),100,'Fact_Acct_ActivityChangeRequest_Populate')
;
-- 28.01.2016 15:03
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Trl (AD_Language,AD_Process_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Process_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Process t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Process_ID=540648 AND NOT EXISTS (SELECT * FROM AD_Process_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Process_ID=t.AD_Process_ID)
;
-- 28.01.2016 15:05
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Para (AD_Client_ID,AD_Element_ID,AD_Org_ID,AD_Process_ID,AD_Process_Para_ID,AD_Reference_ID,ColumnName,Created,CreatedBy,Description,EntityType,FieldLength,Help,IsActive,IsAutocomplete,IsCentrallyMaintained,IsMandatory,IsRange,Name,SeqNo,Updated,UpdatedBy) VALUES (0,206,0,540648,540857,19,'C_Period_ID',TO_TIMESTAMP('2016-01-28 15:05:59','YYYY-MM-DD HH24:MI:SS'),100,'Periode des Kalenders','U',0,'"Periode" bezeichnet einen eklusiven Datumsbereich eines Kalenders.','Y','N','Y','N','N','Periode',10,TO_TIMESTAMP('2016-01-28 15:05:59','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 28.01.2016 15:05
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Para_Trl (AD_Language,AD_Process_Para_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Process_Para_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Process_Para t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Process_Para_ID=540857 AND NOT EXISTS (SELECT * FROM AD_Process_Para_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Process_Para_ID=t.AD_Process_Para_ID)
;
-- 28.01.2016 15:06
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Para (AD_Client_ID,AD_Element_ID,AD_Org_ID,AD_Process_ID,AD_Process_Para_ID,AD_Reference_ID,ColumnName,Created,CreatedBy,Description,EntityType,FieldLength,Help,IsActive,IsAutocomplete,IsCentrallyMaintained,IsMandatory,IsRange,Name,SeqNo,Updated,UpdatedBy) VALUES (0,198,0,540648,540858,19,'C_ElementValue_ID',TO_TIMESTAMP('2016-01-28 15:05:59','YYYY-MM-DD HH24:MI:SS'),100,'Kontenart','U',0,'Account Elements can be natural accounts or user defined values.','Y','N','Y','N','N','Kontenart',20,TO_TIMESTAMP('2016-01-28 15:05:59','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 28.01.2016 15:06
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Para_Trl (AD_Language,AD_Process_Para_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Process_Para_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Process_Para t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Process_Para_ID=540858 AND NOT EXISTS (SELECT * FROM AD_Process_Para_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Process_Para_ID=t.AD_Process_Para_ID)
;
-- 28.01.2016 15:06
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Para (AD_Client_ID,AD_Element_ID,AD_Org_ID,AD_Process_ID,AD_Process_Para_ID,AD_Reference_ID,ColumnName,Created,CreatedBy,Description,EntityType,FieldLength,Help,IsActive,IsAutocomplete,IsCentrallyMaintained,IsMandatory,IsRange,Name,SeqNo,Updated,UpdatedBy) VALUES (0,454,0,540648,540859,19,'M_Product_ID',TO_TIMESTAMP('2016-01-28 15:06:00','YYYY-MM-DD HH24:MI:SS'),100,'Produkt, Leistung, Artikel','U',0,'Bezeichnet eine Einheit, die in dieser Organisation gekauft oder verkauft wird.','Y','N','Y','N','N','Produkt',30,TO_TIMESTAMP('2016-01-28 15:06:00','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 28.01.2016 15:06
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Para_Trl (AD_Language,AD_Process_Para_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Process_Para_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Process_Para t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Process_Para_ID=540859 AND NOT EXISTS (SELECT * FROM AD_Process_Para_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Process_Para_ID=t.AD_Process_Para_ID)
;
-- 28.01.2016 15:06
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Para (AD_Client_ID,AD_Element_ID,AD_Org_ID,AD_Process_ID,AD_Process_Para_ID,AD_Reference_ID,ColumnName,Created,CreatedBy,EntityType,FieldLength,IsActive,IsAutocomplete,IsCentrallyMaintained,IsMandatory,IsRange,Name,SeqNo,Updated,UpdatedBy) VALUES (0,542952,0,540648,540860,10,'IsMandatoryActivity',TO_TIMESTAMP('2016-01-28 15:06:00','YYYY-MM-DD HH24:MI:SS'),100,'U',0,'Y','N','Y','N','N','Kostenstelle ist Pflichtangabe',40,TO_TIMESTAMP('2016-01-28 15:06:00','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 28.01.2016 15:06
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Para_Trl (AD_Language,AD_Process_Para_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Process_Para_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Process_Para t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Process_Para_ID=540860 AND NOT EXISTS (SELECT * FROM AD_Process_Para_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Process_Para_ID=t.AD_Process_Para_ID)
;
-- 28.01.2016 15:06
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Para (AD_Client_ID,AD_Element_ID,AD_Org_ID,AD_Process_ID,AD_Process_Para_ID,AD_Reference_ID,ColumnName,Created,CreatedBy,Description,EntityType,FieldLength,Help,IsActive,IsAutocomplete,IsCentrallyMaintained,IsMandatory,IsRange,Name,SeqNo,Updated,UpdatedBy) VALUES (0,865,0,540648,540861,10,'DocBaseType',TO_TIMESTAMP('2016-01-28 15:06:00','YYYY-MM-DD HH24:MI:SS'),100,'Logical type of document','U',0,'The Document Base Type identifies the base or starting point for a document. Multiple document types may share a single document base type.','Y','N','Y','N','N','Document BaseType',50,TO_TIMESTAMP('2016-01-28 15:06:00','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 28.01.2016 15:06
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Para_Trl (AD_Language,AD_Process_Para_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Process_Para_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Process_Para t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Process_Para_ID=540861 AND NOT EXISTS (SELECT * FROM AD_Process_Para_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Process_Para_ID=t.AD_Process_Para_ID)
;
-- 28.01.2016 15:06
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Para (AD_Client_ID,AD_Element_ID,AD_Org_ID,AD_Process_ID,AD_Process_Para_ID,AD_Reference_ID,ColumnName,Created,CreatedBy,Description,EntityType,FieldLength,Help,IsActive,IsAutocomplete,IsCentrallyMaintained,IsMandatory,IsRange,Name,SeqNo,Updated,UpdatedBy) VALUES (0,187,0,540648,540862,19,'C_BPartner_ID',TO_TIMESTAMP('2016-01-28 15:06:00','YYYY-MM-DD HH24:MI:SS'),100,'Bezeichnet einen Geschäftspartner','U',0,'Ein Geschäftspartner ist jemand, mit dem Sie interagieren. Dies kann Lieferanten, Kunden, Mitarbeiter oder Handelsvertreter umfassen.','Y','N','Y','N','N','Geschäftspartner',60,TO_TIMESTAMP('2016-01-28 15:06:00','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 28.01.2016 15:06
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Para_Trl (AD_Language,AD_Process_Para_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Process_Para_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Process_Para t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Process_Para_ID=540862 AND NOT EXISTS (SELECT * FROM AD_Process_Para_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Process_Para_ID=t.AD_Process_Para_ID)
;
-- 28.01.2016 15:06
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Para (AD_Client_ID,AD_Element_ID,AD_Org_ID,AD_Process_ID,AD_Process_Para_ID,AD_Reference_ID,ColumnName,Created,CreatedBy,Description,EntityType,FieldLength,Help,IsActive,IsAutocomplete,IsCentrallyMaintained,IsMandatory,IsRange,Name,SeqNo,Updated,UpdatedBy) VALUES (0,196,0,540648,540863,19,'C_DocType_ID',TO_TIMESTAMP('2016-01-28 15:06:00','YYYY-MM-DD HH24:MI:SS'),100,'Belegart oder Verarbeitungsvorgaben','U',0,'Die Belegart bestimmt den Nummernkreis und die Vorgaben für die Belegverarbeitung.','Y','N','Y','N','N','Belegart',70,TO_TIMESTAMP('2016-01-28 15:06:00','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 28.01.2016 15:06
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Para_Trl (AD_Language,AD_Process_Para_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Process_Para_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Process_Para t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Process_Para_ID=540863 AND NOT EXISTS (SELECT * FROM AD_Process_Para_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Process_Para_ID=t.AD_Process_Para_ID)
;
-- 28.01.2016 16:31
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Table_Process (AD_Client_ID,AD_Org_ID,AD_Process_ID,AD_Table_ID,Created,CreatedBy,EntityType,IsActive,Updated,UpdatedBy) VALUES (0,0,540648,540701,TO_TIMESTAMP('2016-01-28 16:31:06','YYYY-MM-DD HH24:MI:SS'),100,'de.metas.acct','Y',TO_TIMESTAMP('2016-01-28 16:31:06','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 28.01.2016 16:33
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Element (AD_Client_ID,AD_Element_ID,AD_Org_ID,ColumnName,Created,CreatedBy,EntityType,IsActive,Name,PrintName,Updated,UpdatedBy) VALUES (0,542955,0,'IsActivityNull',TO_TIMESTAMP('2016-01-28 16:33:25','YYYY-MM-DD HH24:MI:SS'),100,'de.metas.acct','Y','Nicht Kostellen','Nicht Kostellen',TO_TIMESTAMP('2016-01-28 16:33:25','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 28.01.2016 16:33
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Element_Trl (AD_Language,AD_Element_ID, Description,Help,Name,PO_Description,PO_Help,PO_Name,PO_PrintName,PrintName, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Element_ID, t.Description,t.Help,t.Name,t.PO_Description,t.PO_Help,t.PO_Name,t.PO_PrintName,t.PrintName, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Element t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Element_ID=542955 AND NOT EXISTS (SELECT * FROM AD_Element_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Element_ID=t.AD_Element_ID)
;
-- 28.01.2016 16:33
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Para (AD_Client_ID,AD_Element_ID,AD_Org_ID,AD_Process_ID,AD_Process_Para_ID,AD_Reference_ID,ColumnName,Created,CreatedBy,Description,EntityType,FieldLength,Help,IsActive,IsAutocomplete,IsCentrallyMaintained,IsMandatory,IsRange,Name,SeqNo,Updated,UpdatedBy) VALUES (0,290,0,540648,540864,10,'DocumentNo',TO_TIMESTAMP('2016-01-28 16:33:43','YYYY-MM-DD HH24:MI:SS'),100,'Document sequence number of the document','U',0,'The document number is usually automatically generated by the system and determined by the document type of the document. If the document is not saved, the preliminary number is displayed in "<>".
If the document type of your document has no automatic document sequence defined, the field is empty if you create a new document. This is for documents which usually have an external number (like vendor invoice). If you leave the field empty, the system will generate a document number for you. The document sequence used for this fallback number is defined in the "Maintain Sequence" window with the name "DocumentNo_<TableName>", where TableName is the actual name of the table (e.g. C_Order).','Y','N','Y','N','N','Beleg Nr.',80,TO_TIMESTAMP('2016-01-28 16:33:43','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 28.01.2016 16:33
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Process_Para_Trl (AD_Language,AD_Process_Para_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Process_Para_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Process_Para t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Process_Para_ID=540864 AND NOT EXISTS (SELECT * FROM AD_Process_Para_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Process_Para_ID=t.AD_Process_Para_ID)
;
-- 28.01.2016 16:33
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
-- Register process parameter 'IsActivityNull' (AD_Process_Para_ID=540865) on
-- process 540648; its reference and entity type are corrected by later
-- UPDATE statements in this migration.
INSERT INTO AD_Process_Para (AD_Client_ID,AD_Element_ID,AD_Org_ID,AD_Process_ID,AD_Process_Para_ID,AD_Reference_ID,ColumnName,Created,CreatedBy,EntityType,FieldLength,IsActive,IsAutocomplete,IsCentrallyMaintained,IsMandatory,IsRange,Name,SeqNo,Updated,UpdatedBy) VALUES (0,542955,0,540648,540865,10,'IsActivityNull',TO_TIMESTAMP('2016-01-28 16:33:43','YYYY-MM-DD HH24:MI:SS'),100,'U',0,'Y','N','Y','N','N','Nicht Kostellen',90,TO_TIMESTAMP('2016-01-28 16:33:43','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 28.01.2016 16:33
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
-- Seed untranslated translation rows for parameter 540865.
INSERT INTO AD_Process_Para_Trl (AD_Language,AD_Process_Para_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Process_Para_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Process_Para t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Process_Para_ID=540865 AND NOT EXISTS (SELECT * FROM AD_Process_Para_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Process_Para_ID=t.AD_Process_Para_ID)
;
-- 28.01.2016 16:33
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
-- Register process parameter 'DeleteOld' (AD_Process_Para_ID=540866,
-- SeqNo=100) on process 540648.
INSERT INTO AD_Process_Para (AD_Client_ID,AD_Element_ID,AD_Org_ID,AD_Process_ID,AD_Process_Para_ID,AD_Reference_ID,ColumnName,Created,CreatedBy,Description,EntityType,FieldLength,IsActive,IsAutocomplete,IsCentrallyMaintained,IsMandatory,IsRange,Name,SeqNo,Updated,UpdatedBy) VALUES (0,1669,0,540648,540866,10,'DeleteOld',TO_TIMESTAMP('2016-01-28 16:33:44','YYYY-MM-DD HH24:MI:SS'),100,'Otherwise records will be added','U',0,'Y','N','Y','N','N','Delete old/existing records',100,TO_TIMESTAMP('2016-01-28 16:33:44','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 28.01.2016 16:33
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
-- Seed untranslated translation rows for parameter 540866.
INSERT INTO AD_Process_Para_Trl (AD_Language,AD_Process_Para_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Process_Para_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Process_Para t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Process_Para_ID=540866 AND NOT EXISTS (SELECT * FROM AD_Process_Para_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Process_Para_ID=t.AD_Process_Para_ID)
;
-- The following run of UPDATEs incrementally fixes the parameters created
-- above (reference types, entity types, active flags and display sequence).
-- They replay a dictionary-editing session, so later statements deliberately
-- override earlier ones for the same AD_Process_Para_ID; statement order
-- must be preserved.
-- 28.01.2016 16:34
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET AD_Reference_ID=17, AD_Reference_Value_ID=319,Updated=TO_TIMESTAMP('2016-01-28 16:34:01','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540865
;
-- 28.01.2016 16:34
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
-- Move parameters 540857..540865 into the 'de.metas.acct' entity type.
UPDATE AD_Process_Para SET EntityType='de.metas.acct',Updated=TO_TIMESTAMP('2016-01-28 16:34:07','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540865
;
-- 28.01.2016 16:34
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET EntityType='de.metas.acct',Updated=TO_TIMESTAMP('2016-01-28 16:34:14','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540857
;
-- 28.01.2016 16:34
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET EntityType='de.metas.acct',Updated=TO_TIMESTAMP('2016-01-28 16:34:17','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540858
;
-- 28.01.2016 16:34
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET EntityType='de.metas.acct',Updated=TO_TIMESTAMP('2016-01-28 16:34:20','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540859
;
-- 28.01.2016 16:34
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET EntityType='de.metas.acct',Updated=TO_TIMESTAMP('2016-01-28 16:34:25','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540860
;
-- 28.01.2016 16:34
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET EntityType='de.metas.acct',Updated=TO_TIMESTAMP('2016-01-28 16:34:28','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540861
;
-- 28.01.2016 16:34
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET EntityType='de.metas.acct',Updated=TO_TIMESTAMP('2016-01-28 16:34:31','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540862
;
-- 28.01.2016 16:34
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET EntityType='de.metas.acct',Updated=TO_TIMESTAMP('2016-01-28 16:34:35','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540863
;
-- 28.01.2016 16:34
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET EntityType='de.metas.acct',Updated=TO_TIMESTAMP('2016-01-28 16:34:38','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540864
;
-- 28.01.2016 16:34
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
-- Make 'DeleteOld' (540866) a mandatory Yes/No parameter defaulting to 'Y'.
UPDATE AD_Process_Para SET AD_Reference_ID=20, DefaultValue='Y', EntityType='de.metas.acct', IsMandatory='Y',Updated=TO_TIMESTAMP('2016-01-28 16:34:58','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540866
;
-- 28.01.2016 16:42
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET IsActive='Y', SeqNo=20,Updated=TO_TIMESTAMP('2016-01-28 16:42:58','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540861
;
-- 28.01.2016 16:50
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET IsActive='Y', SeqNo=20,Updated=TO_TIMESTAMP('2016-01-28 16:50:08','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540861
;
-- 28.01.2016 16:55
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET IsActive='Y', SeqNo=20,Updated=TO_TIMESTAMP('2016-01-28 16:55:25','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540861
;
-- 28.01.2016 16:57
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET IsActive='Y', SeqNo=20,Updated=TO_TIMESTAMP('2016-01-28 16:57:42','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540861
;
-- 28.01.2016 17:06
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
-- Renumber the display sequence of the process parameters.
UPDATE AD_Process_Para SET IsActive='Y', SeqNo=20,Updated=TO_TIMESTAMP('2016-01-28 17:06:43','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540861
;
-- 28.01.2016 17:06
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET IsActive='Y', SeqNo=30,Updated=TO_TIMESTAMP('2016-01-28 17:06:43','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540863
;
-- 28.01.2016 17:06
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET IsActive='Y', SeqNo=40,Updated=TO_TIMESTAMP('2016-01-28 17:06:43','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540858
;
-- 28.01.2016 17:06
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET IsActive='Y', SeqNo=50,Updated=TO_TIMESTAMP('2016-01-28 17:06:43','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540859
;
-- 28.01.2016 17:06
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET IsActive='Y', SeqNo=60,Updated=TO_TIMESTAMP('2016-01-28 17:06:43','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540860
;
-- 28.01.2016 17:06
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET IsActive='Y', SeqNo=70,Updated=TO_TIMESTAMP('2016-01-28 17:06:43','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540862
;
-- 28.01.2016 17:07
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
-- Second renumbering pass, overriding some of the 17:06 values.
UPDATE AD_Process_Para SET IsActive='Y', SeqNo=40,Updated=TO_TIMESTAMP('2016-01-28 17:07:59','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540864
;
-- 28.01.2016 17:07
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET IsActive='Y', SeqNo=50,Updated=TO_TIMESTAMP('2016-01-28 17:07:59','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540858
;
-- 28.01.2016 17:07
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET IsActive='Y', SeqNo=70,Updated=TO_TIMESTAMP('2016-01-28 17:07:59','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540865
;
-- 28.01.2016 17:07
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET IsActive='Y', SeqNo=80,Updated=TO_TIMESTAMP('2016-01-28 17:07:59','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540859
;
-- 28.01.2016 17:07
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET IsActive='Y', SeqNo=90,Updated=TO_TIMESTAMP('2016-01-28 17:07:59','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540862
;
-- 28.01.2016 17:27
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET AD_Reference_ID=17, AD_Reference_Value_ID=319,Updated=TO_TIMESTAMP('2016-01-28 17:27:19','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540860
;
-- 28.01.2016 17:27
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET DefaultValue='Y',Updated=TO_TIMESTAMP('2016-01-28 17:27:27','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540865
;
-- 28.01.2016 17:28
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Process_Para SET AD_Reference_ID=17, AD_Reference_Value_ID=183,Updated=TO_TIMESTAMP('2016-01-28 17:28:12','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_Para_ID=540861
;
-- 28.01.2016 17:29
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
-- Finally, flag the process itself to refresh the UI after execution.
UPDATE AD_Process SET RefreshAllAfterExecution='Y',Updated=TO_TIMESTAMP('2016-01-28 17:29:01','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Process_ID=540648
;
-- Resources in the database are represented by a state machine whose states
-- are identified by the `status` field from this enum.
-- NOTE: in PostgreSQL the declaration order of enum values defines their
-- sort order, so new states should be appended rather than inserted.
CREATE TYPE resource_status AS ENUM (
  -- Resource that needs to be processed.
  -- {
  --    status: 'Queued'
  --    uri: string
  --    status_text: string
  -- }
  'Queued',
  -- Resource with a URI we failed to parse.
  -- {
  --    status: 'URIParseFailed'
  --    uri: string
  --    status_text: string
  -- }
  'URIParseFailed',
  -- Resource that we failed to fetch. It may have `ipfs_uri` if it was
  -- possible to derive it from the `uri`.
  -- {
  --    status: 'ContentFetchFailed'
  --    uri: string
  --    status_text: string
  --    ipfs_uri: string|null
  -- }
  'ContentFetchFailed',
  -- Resource that we failed to submit a pin request for. If it has an
  -- `ipfs_uri` we've sent a request to a cluster asking to pin the given cid
  -- and it may have failed. If we could not derive an `ipfs_uri` we've tried
  -- fetching the data and then sent it to the ipfs cluster to add to the
  -- network and pin, but the cluster failed. It may also be that the cluster
  -- took too long to respond, so we aborted the request. In any case this
  -- indicates some issue with the cluster.
  -- {
  --    status: 'PinRequestFailed'
  --    uri: string
  --    status_text: string
  --    ipfs_uri: string|null
  -- }
  'PinRequestFailed',
  -- Resource was linked, meaning the cluster gave us a CID for it and queued
  -- it to be pinned. In this state there is a corresponding `content` record
  -- for this resource.
  -- {
  --    status: 'ContentLinked'
  --    uri:string
  --    status_text:string
  --    ipfs_uri: string|null
  --    content_cid: string
  -- }
  'ContentLinked'
);
-- As per EIP-721, a token asset is identified by a `tokenURI` that MAY
-- point to a JSON file that conforms to the "ERC721 Metadata JSON Schema".
-- Here we represent an nft asset by a state machine in which states are
-- identified by a `status` field from this enum.
-- NOTE: enum value declaration order defines sort order in PostgreSQL.
CREATE TYPE nft_asset_status AS ENUM (
  -- nft asset was created from `tokenURI` and is queued for processing.
  -- {
  --    status: 'Queued'
  --    token_uri: string
  --    status_text: string
  -- }
  'Queued',
  -- Failed to parse the uri (it is invalid or maybe the protocol isn't
  -- supported).
  -- {
  --    status: 'URIParseFailed'
  --    token_uri: string
  --    status_text: string
  -- }
  'URIParseFailed',
  -- Failed to fetch content from `token_uri`. It may have `ipfs_uri` if it
  -- was possible to derive it from the `token_uri`.
  -- {
  --    status: 'ContentFetchFailed'
  --    token_uri: string
  --    status_text: string
  --    ipfs_uri: string|null
  -- }
  'ContentFetchFailed',
  -- Failed to parse content from `token_uri` as per the ERC721 Metadata JSON
  -- schema.
  -- {
  --    status: 'ContentParseFailed'
  --    token_uri: string
  --    status_text: string
  --    ipfs_uri: string|null
  -- }
  'ContentParseFailed',
  -- Asset that we failed to submit a pin request for. If it has an
  -- `ipfs_uri` we've sent a request to a cluster asking to pin the given cid
  -- and it may have failed. If we could not derive an `ipfs_uri` we've tried
  -- fetching the data and then sent it to the ipfs cluster to add to the
  -- network and pin, but the cluster failed. It may also be that the cluster
  -- took too long to respond, so we aborted the request. In any case this
  -- indicates some issue with the cluster.
  -- {
  --    status: 'PinRequestFailed'
  --    token_uri: string
  --    status_text: string
  --    ipfs_uri: string|null
  -- }
  'PinRequestFailed',
  -- Asset was fetched, parsed and linked to a corresponding metadata record.
  -- It has `content_cid` that the cluster was asked to pin.
  -- {
  --    status: 'Linked'
  --    uri:string
  --    status_text:string
  --    ipfs_uri: string|null
  --    content_cid: string
  -- }
  'Linked'
);
-- Lifecycle states reported for a pin by the pinning service.
CREATE TYPE pin_status AS ENUM (
  'PinError',
  'PinQueued',
  'Pinned',
  'Pinning'
);
-- Which external service holds a given pin.
CREATE TYPE pin_service AS ENUM (
  'Pinata',
  'IpfsCluster'
);
-- A blockchain block, identified by its hash.
CREATE TABLE blockchain_block (
  hash TEXT NOT NULL,
  -- Block height; a unique index on this column is created at the end of
  -- this schema.
  number BIGINT NOT NULL,
  updated_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
  inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
  PRIMARY KEY (hash)
);
-- NFTs that were discovered on the blockchain. These records only update
-- when an nft is transferred from one owner to another.
-- Each nft will have one associated `nft_asset` mapped by `token_uri`.
-- Multiple nfts may map to the same `nft_asset`, which would go against the
-- ERC-721 spec, yet nothing enforces that.
CREATE TABLE nft (
  -- unique identifier of the nft in the subgraph; in practice it encodes
  -- the (contract_id, token_id) tuple.
  id TEXT NOT NULL,
  -- An ERC-721 contract identifier.
  contract_id TEXT NOT NULL,
  -- The NFT identifier. Note this is NOT globally unique, but rather unique
  -- per ERC-721 contract.
  token_id TEXT NOT NULL,
  -- A distinct URI to a given nft asset. Per the ERC-721 spec that is a URI
  -- that MAY point to a JSON file that conforms to the "ERC721 Metadata JSON
  -- Schema".
  token_uri TEXT NOT NULL,
  -- Timestamp of when the nft was minted.
  mint_time TIMESTAMP WITH TIME ZONE NOT NULL,
  -- Current owner of this nft. This changes over time.
  nft_owner_id TEXT NOT NULL,
  -- Time when this record was last updated.
  updated_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
  -- Time when this record was created.
  inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
  PRIMARY KEY (id)
);
-- A blockchain block may contain multiple nft transfers & this table lets us
-- track the n:m relation of which NFTs were discovered in which blocks.
CREATE TABLE nfts_by_blockchain_blocks (
  blockchain_block_hash TEXT NOT NULL,
  nft_id TEXT NOT NULL,
  updated_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
  inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
  PRIMARY KEY (blockchain_block_hash, nft_id)
);
-- An nft-owning account; referenced by nft.nft_owner_id.
CREATE TABLE nft_owner (
  id TEXT NOT NULL,
  updated_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
  inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
  PRIMARY KEY (id)
);
-- Each record represents the state of an NFT asset in the system. Once new
-- nfts from the blockchain are discovered, `nft_asset` records are created
-- for their `token_uri`s. As the system processes incoming data it updates
-- the state of each `nft_asset` as per `nft_asset_status`.
CREATE TABLE nft_asset (
  -- URI that was discovered on chain.
  token_uri TEXT NOT NULL,
  -- Represents the `ipfs://` URL for this asset. It is present if it was
  -- possible to derive it from `token_uri`. E.g. if `token_uri` is the IPFS
  -- gateway URL https://ipfs.io/ipfs/Qm...Hash/file/path this field will be
  -- derived to ipfs://Qm...Hash/file/path.
  -- NOTE(review): the column is named `ipfs_url` while the status-enum
  -- comments call the same value `ipfs_uri` — inconsistent naming; confirm
  -- before renaming either.
  ipfs_url TEXT,
  -- Cryptographic identifier of the content under `token_uri`. Will only be
  -- present in the `Linked` state.
  content_cid TEXT,
  status nft_asset_status NOT NULL,
  status_text TEXT NOT NULL,
  updated_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
  inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
  PRIMARY KEY (token_uri)
);
-- Once a token asset is fetched and parsed as per the ERC-721 metadata
-- schema, all mandatory fields are stored in this table.
CREATE TABLE nft_metadata (
  -- Cryptographic identifier of the content this metadata was parsed from.
  content_cid TEXT NOT NULL,
  name TEXT NOT NULL,
  description TEXT NOT NULL,
  image_uri TEXT NOT NULL,
  updated_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
  inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
  PRIMARY KEY (content_cid)
);
-- Once an nft asset is fetched and parsed, all the encountered URLs are
-- saved as `resource` rows and concurrently archived. In the process the
-- resource state is updated as per `resource_status`.
CREATE TABLE resource (
  -- Uniform Resource Identifier (URI) of this resource.
  uri TEXT NOT NULL,
  -- Current status of the resource.
  status resource_status NOT NULL,
  status_text TEXT,
  -- Represents the `ipfs://` URL for this resource. It is present if it was
  -- possible to derive it from `uri`. E.g. if `uri` is the IPFS gateway URL
  -- https://ipfs.io/ipfs/Qm...Hash/file/path this field will be derived to
  -- ipfs://Qm...Hash/file/path.
  ipfs_url TEXT,
  -- Cryptographic identifier of the content under `uri`. Will only be
  -- present in the `ContentLinked` state.
  content_cid TEXT,
  updated_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
  inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
  PRIMARY KEY (uri)
);
-- nft assets, according to the ERC-721 metadata spec, must link to an
-- `image`; however in practice various contracts also tend to include links
-- to more resources in the metadata JSON. This table maps the `content_cid`
-- of the `nft_asset` / `nft_metadata` to those unspecified resources.
CREATE TABLE other_nft_resources (
  content_cid TEXT NOT NULL,
  resource_uri TEXT NOT NULL,
  updated_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
  inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
  PRIMARY KEY (content_cid, resource_uri)
);
-- Content addressed by CID; referenced by nft_asset/resource content_cid.
CREATE TABLE content (
  cid TEXT NOT NULL,
  -- Size of the content DAG in bytes; NULL until known.
  dag_size BIGINT,
  updated_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
  inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
  PRIMARY KEY (cid)
);
-- One pin of a piece of content on a given service; a cid may be pinned by
-- several services (one row each).
CREATE TABLE pin (
  id BIGSERIAL NOT NULL,
  content_cid TEXT NOT NULL,
  service pin_service NOT NULL,
  status pin_status NOT NULL,
  updated_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
  inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
  PRIMARY KEY (id)
);
-- An ERC-721 contract discovered on chain; referenced by nft.contract_id.
CREATE TABLE blockchain_contract (
  id TEXT NOT NULL,
  name TEXT,
  symbol TEXT,
  -- Whether the contract reports support for the EIP-721 metadata extension.
  supports_eip721_metadata BOOLEAN NOT NULL,
  updated_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
  inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
  PRIMARY KEY (id)
);
-- A batch of an erc721 import run; `next_id` chains batches together.
CREATE TABLE erc721_import (
  id TEXT NOT NULL,
  next_id TEXT NOT NULL,
  updated_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
  inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
  PRIMARY KEY (id)
);
-- n:m relation recording which nfts were ingested by which import batch.
CREATE TABLE erc721_import_by_nft (
  erc721_import_id TEXT NOT NULL,
  nft_id TEXT NOT NULL,
  updated_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
  inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
  PRIMARY KEY (erc721_import_id,nft_id)
);
-- Enforce one block record per block height. The index covers `number`
-- (`hash` is already the primary key, so a unique index on it would be
-- redundant) but was previously misnamed `unique_blockchain_block_hash`;
-- the name now matches the indexed column.
CREATE UNIQUE INDEX unique_blockchain_block_number ON blockchain_block (number);
--
--
-- Title: SQL Core (base) Install File
--
-- Author: Pierre-Henry Soria <hello@ph7cms.com>
-- Copyright: (c) 2012-2020, Pierre-Henry Soria. All Rights Reserved.
-- License: MIT License; See PH7.LICENSE.txt and PH7.COPYRIGHT.txt in the root directory.
-- Package: PH7 / Install / Data / Sql / MySQL
--
--
--
-- Set the variables --
--
-- Site identity and contact addresses (placeholder values replaced during
-- installation).
SET @sDefaultSiteName = 'My Dating WebApp';
SET @sAdminEmail = 'admin@yoursite.com';
SET @sFeedbackEmail = 'feedback@yoursite.com';
SET @sNoReplyEmail = 'noreply@yoursite.com';
SET @sDefaultSysModule = 'user';
SET @sDefaultTemplate = 'base';
-- External service endpoints; %site_name%/%site_url% are template tokens
-- substituted by the application, not by MySQL.
SET @sIpApiUrl = 'https://whatismyipaddress.com/ip/';
SET @sDefaultVideoUrl = 'https://www.youtube.com/watch?v=q-1eHnBOg4A';
SET @sChatApiUrl = 'https://ph7cms.com/addons/chat/?name=%site_name%&url=%site_url%&skin=4';
SET @sChatrouletteApiUrl = 'https://ph7cms.com/addons/chatroulette/?name=%site_name%&url=%site_url%&skin=1';
-- Membership group IDs; must match the groupId values seeded into
-- ph7_memberships below.
SET @iUserVisitorGroup = 1;
SET @iUserPendingGroup = 9;
SET @iUserRegularGroup = 2;
-- Install timestamp reused for the ghost user's join/activity dates.
SET @sCurrentDate = CURRENT_TIMESTAMP;
-- Throw-away random password for the "ghost" placeholder account (inserted
-- below with active=0). NOTE(review): SHA1(RAND() + UNIX_TIMESTAMP()) is not
-- a secure password hash; acceptable only because this account is never
-- meant to authenticate — confirm it cannot log in.
SET @sPassword = SHA1(RAND() + UNIX_TIMESTAMP());
-- Back-office administrator accounts.
CREATE TABLE IF NOT EXISTS ph7_admins (
  profileId tinyint(3) unsigned NOT NULL AUTO_INCREMENT,
  username varchar(40) NOT NULL,
  -- Password hash (hashing is done by the application before insert).
  password varchar(120) NOT NULL,
  email varchar(120) NOT NULL,
  firstName varchar(50) DEFAULT NULL,
  lastName varchar(50) DEFAULT NULL,
  sex enum('male','female') NOT NULL DEFAULT 'male',
  lang varchar(5) NOT NULL DEFAULT 'en_US',
  -- UTC offset stored as a string, e.g. '-6'.
  timeZone varchar(6) NOT NULL DEFAULT '-6',
  joinDate datetime DEFAULT NULL,
  lastActivity datetime DEFAULT NULL,
  lastEdit datetime DEFAULT NULL,
  ban enum('0','1') DEFAULT '0',
  -- varchar(45) accommodates IPv6 addresses.
  ip varchar(45) NOT NULL DEFAULT '127.0.0.1',
  isTwoFactorAuth enum('1','0') DEFAULT '0',
  twoFactorAuthSecret varchar(40) DEFAULT NULL,
  hashValidation varchar(40) DEFAULT NULL,
  PRIMARY KEY (profileId),
  UNIQUE KEY username (username),
  UNIQUE KEY email (email)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Membership groups (subscription tiers). `permissions` holds a
-- PHP-serialized map of feature-name => '0'/'1'.
CREATE TABLE IF NOT EXISTS ph7_memberships (
  groupId tinyint(2) unsigned NOT NULL AUTO_INCREMENT,
  name varchar(64) NOT NULL DEFAULT '',
  description varchar(191) NOT NULL,
  -- PHP-serialized permission map; do not edit by hand.
  permissions text NOT NULL,
  price decimal(10,2) unsigned NOT NULL,
  -- Subscription length in days; 0 means no expiration.
  expirationDays smallint(4) unsigned NOT NULL,
  enable enum('1','0') DEFAULT '1',
  PRIMARY KEY (groupId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Seed the default tiers. Group IDs 1/9/2 come from the variables set above
-- and are referenced by ph7_members.groupId (default 2 = Regular).
INSERT INTO ph7_memberships (groupId, name, description, permissions, price, expirationDays, enable) VALUES
(@iUserVisitorGroup, 'Visitor (not visible)', 'This subscription is offered to all visitors who visit the site.', 'a:23:{s:21:"quick_search_profiles";s:1:"1";s:24:"advanced_search_profiles";s:1:"1";s:10:"read_mails";s:1:"0";s:10:"send_mails";s:1:"0";s:13:"view_pictures";s:1:"1";s:15:"upload_pictures";s:1:"0";s:11:"view_videos";s:1:"1";s:13:"upload_videos";s:1:"0";s:17:"instant_messaging";s:1:"0";s:4:"chat";s:1:"1";s:12:"chatroulette";s:1:"1";s:10:"hot_or_not";s:1:"1";s:15:"love_calculator";s:1:"0";s:10:"read_notes";s:1:"1";s:11:"write_notes";s:1:"0";s:15:"read_blog_posts";s:1:"1";s:13:"view_comments";s:1:"1";s:14:"write_comments";s:1:"0";s:12:"forum_access";s:1:"1";s:19:"create_forum_topics";s:1:"0";s:19:"answer_forum_topics";s:1:"0";s:12:"games_access";s:1:"1";s:18:"member_site_access";s:1:"0";}', 0.00, 0, '1'),
(@iUserPendingGroup, 'Pending (not visible)', 'Pending subscription provisional migration to a different subscription.', 'a:23:{s:21:"quick_search_profiles";s:1:"1";s:24:"advanced_search_profiles";s:1:"1";s:10:"read_mails";s:1:"0";s:10:"send_mails";s:1:"0";s:13:"view_pictures";s:1:"1";s:15:"upload_pictures";s:1:"0";s:11:"view_videos";s:1:"1";s:13:"upload_videos";s:1:"0";s:17:"instant_messaging";s:1:"0";s:4:"chat";s:1:"1";s:12:"chatroulette";s:1:"1";s:10:"hot_or_not";s:1:"1";s:15:"love_calculator";s:1:"0";s:10:"read_notes";s:1:"1";s:11:"write_notes";s:1:"0";s:15:"read_blog_posts";s:1:"1";s:13:"view_comments";s:1:"1";s:14:"write_comments";s:1:"0";s:12:"forum_access";s:1:"1";s:19:"create_forum_topics";s:1:"0";s:19:"answer_forum_topics";s:1:"0";s:12:"games_access";s:1:"1";s:18:"member_site_access";s:1:"0";}', 0.00, 15, '0'),
(@iUserRegularGroup, 'Regular (Free)', 'Free Membership.', 'a:23:{s:21:"quick_search_profiles";s:1:"1";s:24:"advanced_search_profiles";s:1:"1";s:10:"read_mails";s:1:"1";s:10:"send_mails";s:1:"1";s:13:"view_pictures";s:1:"1";s:15:"upload_pictures";s:1:"1";s:11:"view_videos";s:1:"1";s:13:"upload_videos";s:1:"1";s:17:"instant_messaging";s:1:"1";s:4:"chat";s:1:"1";s:12:"chatroulette";s:1:"1";s:10:"hot_or_not";s:1:"1";s:15:"love_calculator";s:1:"1";s:10:"read_notes";s:1:"1";s:11:"write_notes";s:1:"1";s:15:"read_blog_posts";s:1:"1";s:13:"view_comments";s:1:"1";s:14:"write_comments";s:1:"1";s:12:"forum_access";s:1:"1";s:19:"create_forum_topics";s:1:"1";s:19:"answer_forum_topics";s:1:"1";s:12:"games_access";s:1:"1";s:18:"member_site_access";s:1:"1";}', 0.00, 0, '1'),
(4, 'Platinum', 'The membership for the small budget.', 'a:23:{s:21:"quick_search_profiles";s:1:"1";s:24:"advanced_search_profiles";s:1:"1";s:10:"read_mails";s:1:"1";s:10:"send_mails";s:1:"1";s:13:"view_pictures";s:1:"1";s:15:"upload_pictures";s:1:"1";s:11:"view_videos";s:1:"1";s:13:"upload_videos";s:1:"1";s:17:"instant_messaging";s:1:"1";s:4:"chat";s:1:"1";s:12:"chatroulette";s:1:"1";s:10:"hot_or_not";s:1:"1";s:15:"love_calculator";s:1:"1";s:10:"read_notes";s:1:"1";s:11:"write_notes";s:1:"1";s:15:"read_blog_posts";s:1:"1";s:13:"view_comments";s:1:"1";s:14:"write_comments";s:1:"1";s:12:"forum_access";s:1:"1";s:19:"create_forum_topics";s:1:"1";s:19:"answer_forum_topics";s:1:"1";s:12:"games_access";s:1:"1";s:18:"member_site_access";s:1:"1";}', 9.99, 5, '1'),
(5, 'Silver', 'The premium membership!', 'a:23:{s:21:"quick_search_profiles";s:1:"1";s:24:"advanced_search_profiles";s:1:"1";s:10:"read_mails";s:1:"1";s:10:"send_mails";s:1:"1";s:13:"view_pictures";s:1:"1";s:15:"upload_pictures";s:1:"1";s:11:"view_videos";s:1:"1";s:13:"upload_videos";s:1:"1";s:17:"instant_messaging";s:1:"1";s:4:"chat";s:1:"1";s:12:"chatroulette";s:1:"1";s:10:"hot_or_not";s:1:"1";s:15:"love_calculator";s:1:"1";s:10:"read_notes";s:1:"1";s:11:"write_notes";s:1:"1";s:15:"read_blog_posts";s:1:"1";s:13:"view_comments";s:1:"1";s:14:"write_comments";s:1:"1";s:12:"forum_access";s:1:"1";s:19:"create_forum_topics";s:1:"1";s:19:"answer_forum_topics";s:1:"1";s:12:"games_access";s:1:"1";s:18:"member_site_access";s:1:"1";}', 19.99, 10, '1'),
(6, 'Gold', 'The must membership! The Gold!!!', 'a:23:{s:21:"quick_search_profiles";s:1:"1";s:24:"advanced_search_profiles";s:1:"1";s:10:"read_mails";s:1:"1";s:10:"send_mails";s:1:"1";s:13:"view_pictures";s:1:"1";s:15:"upload_pictures";s:1:"1";s:11:"view_videos";s:1:"1";s:13:"upload_videos";s:1:"1";s:17:"instant_messaging";s:1:"1";s:4:"chat";s:1:"1";s:12:"chatroulette";s:1:"1";s:10:"hot_or_not";s:1:"1";s:15:"love_calculator";s:1:"1";s:10:"read_notes";s:1:"1";s:11:"write_notes";s:1:"1";s:15:"read_blog_posts";s:1:"1";s:13:"view_comments";s:1:"1";s:14:"write_comments";s:1:"1";s:12:"forum_access";s:1:"1";s:19:"create_forum_topics";s:1:"1";s:19:"answer_forum_topics";s:1:"1";s:12:"games_access";s:1:"1";s:18:"member_site_access";s:1:"1";}', 29.99, 30, '1');
-- End-user (member) accounts.
CREATE TABLE IF NOT EXISTS ph7_members (
  profileId int(10) unsigned NOT NULL AUTO_INCREMENT,
  email varchar(120) NOT NULL,
  username varchar(40) NOT NULL,
  -- Password hash (hashing is done by the application before insert).
  password varchar(120) NOT NULL,
  firstName varchar(50) DEFAULT NULL,
  lastName varchar(50) DEFAULT NULL,
  birthDate date NULL,
  sex enum('male','female','couple') NOT NULL DEFAULT 'female',
  -- SET type: a member can be matched against several sexes at once.
  matchSex set('male','female','couple') NOT NULL DEFAULT 'male',
  ip varchar(45) NOT NULL DEFAULT '127.0.0.1',
  bankAccount varchar(150) DEFAULT NULL,
  -- References ph7_memberships.groupId; 2 = Regular (Free) tier.
  groupId tinyint(2) unsigned NOT NULL DEFAULT 2,
  membershipDate datetime DEFAULT NULL,
  userStatus tinyint(1) unsigned NOT NULL DEFAULT 1, -- 0 = Offline, 1 = Online, 2 = Busy, 3 = Away
  joinDate datetime DEFAULT NULL,
  lastActivity datetime DEFAULT NULL,
  lastEdit datetime DEFAULT NULL,
  -- Avatar file extension (e.g. 'jpg'); NULL when no avatar uploaded.
  -- NOTE(review): assumption based on char(5) width — confirm in the app.
  avatar char(5) DEFAULT NULL,
  approvedAvatar tinyint(1) unsigned NOT NULL DEFAULT 1,
  featured tinyint(1) unsigned NOT NULL DEFAULT 0,
  lang varchar(5) NOT NULL DEFAULT 'en_US',
  hashValidation varchar(40) DEFAULT NULL,
  isTwoFactorAuth enum('1','0') DEFAULT '0',
  twoFactorAuthSecret varchar(40) DEFAULT NULL,
  views int(11) unsigned NOT NULL DEFAULT 0,
  reference varchar(191) DEFAULT NULL,
  votes int(11) DEFAULT 0,
  score float DEFAULT 0,
  credits int(6) unsigned NOT NULL DEFAULT 0, -- Not used for the moment (maybe in the future by the payment module)
  affiliatedId int(10) unsigned NOT NULL DEFAULT 0,
  active tinyint(1) unsigned NOT NULL DEFAULT 1,
  ban tinyint(1) unsigned NOT NULL DEFAULT 0,
  PRIMARY KEY (profileId),
  FOREIGN KEY (groupId) REFERENCES ph7_memberships(groupId),
  UNIQUE KEY (username),
  UNIQUE KEY (email),
  KEY birthDate (birthDate)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Extended (1:1) profile details for a member.
CREATE TABLE IF NOT EXISTS ph7_members_info (
  profileId int(10) unsigned NOT NULL,
  middleName varchar(50) DEFAULT NULL,
  description text DEFAULT NULL,
  punchline varchar(191) DEFAULT NULL,
  address varchar(191) DEFAULT NULL,
  city varchar(150) DEFAULT NULL,
  state varchar(150) DEFAULT NULL,
  zipCode varchar(20) DEFAULT NULL,
  -- ISO 3166-1 alpha-2 country code.
  country char(2) DEFAULT NULL,
  phone varchar(100) DEFAULT NULL,
  website varchar(120) DEFAULT NULL,
  socialNetworkSite varchar(120) DEFAULT NULL,
  -- NOTE(review): units are not declared here (likely cm/kg) — confirm in
  -- the application; tinyint unsigned caps both at 255.
  height tinyint(3) unsigned DEFAULT NULL,
  weight tinyint(3) unsigned DEFAULT NULL,
  PRIMARY KEY (profileId),
  KEY country (country),
  FOREIGN KEY (profileId) REFERENCES ph7_members(profileId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Per-member privacy preferences (1:1 with ph7_members).
CREATE TABLE IF NOT EXISTS ph7_members_privacy (
  profileId int(10) unsigned NOT NULL,
  privacyProfile enum('all','only_members','only_me') NOT NULL DEFAULT 'all',
  searchProfile enum('yes','no') NOT NULL DEFAULT 'yes',
  userSaveViews enum('yes','no') NOT NULL DEFAULT 'yes',
  PRIMARY KEY (profileId),
  FOREIGN KEY (profileId) REFERENCES ph7_members(profileId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-- Per-member notification preferences (1:1 with ph7_members).
CREATE TABLE IF NOT EXISTS ph7_members_notifications (
  profileId int(10) unsigned NOT NULL,
  enableNewsletters tinyint(1) unsigned NOT NULL DEFAULT 1,
  newMsg tinyint(1) unsigned NOT NULL DEFAULT 1,
  friendRequest tinyint(1) unsigned NOT NULL DEFAULT 1,
  PRIMARY KEY (profileId),
  FOREIGN KEY (profileId) REFERENCES ph7_members(profileId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-- GHOST User. Do not remove ghost default member!
-- The ghost placeholder (profileId=1) stands in for deleted members. It is
-- inserted inactive (active=0) with the random @sPassword set above, and
-- with all privacy/notification options disabled.
INSERT INTO ph7_members (profileId, email, username, password, firstName, lastName, birthDate, ip, lastActivity, joinDate, active) VALUES
(1, 'ghost@ghost', 'ghost', @sPassword, 'Ghost', 'The Ghost', '1001-01-01', '00.000.00.00', @sCurrentDate, @sCurrentDate, 0);
INSERT INTO ph7_members_info (profileId, description, address, city, state, zipCode, country) VALUES
(1, 'This profile doesn''t exist anymore. So I''m the ghost who replaces him/her during this time', 'The Ghost City', 'Ghost Town', 'Ghost State', '000000', 'US');
-- Privacy settings
INSERT INTO ph7_members_privacy (profileId, privacyProfile, searchProfile, userSaveViews) VALUES
(1, 'all', 'yes', 'yes');
-- Notifications
INSERT INTO ph7_members_notifications (profileId, enableNewsletters, newMsg, friendRequest) VALUES
(1, 0, 0, 0);
-- Affiliate partner accounts (commission-earning referrers).
CREATE TABLE IF NOT EXISTS ph7_affiliates (
  profileId int(10) unsigned NOT NULL AUTO_INCREMENT,
  username varchar(40) NOT NULL,
  firstName varchar(50) NOT NULL,
  lastName varchar(50) NOT NULL,
  -- Password hash (hashing is done by the application before insert).
  password varchar(120) NOT NULL,
  email varchar(120) NOT NULL,
  sex enum('male','female') NOT NULL DEFAULT 'male',
  birthDate date NULL,
  ip varchar(45) NOT NULL DEFAULT '127.0.0.1',
  bankAccount varchar(150) DEFAULT NULL,
  -- Commission balance and payout history.
  amount decimal(8,2) NOT NULL DEFAULT '0.00',
  totalPayment decimal(8,2) NOT NULL DEFAULT '0.00',
  lastPayment decimal(8,2) NOT NULL DEFAULT '0.00',
  lastPaymentDate datetime NULL,
  lang varchar(5) NOT NULL DEFAULT 'en_US',
  hashValidation varchar(40) DEFAULT NULL,
  isTwoFactorAuth enum('1','0') DEFAULT '0',
  twoFactorAuthSecret varchar(40) DEFAULT NULL,
  refer int(10) unsigned DEFAULT 0,
  joinDate datetime DEFAULT NULL,
  lastActivity datetime DEFAULT NULL,
  lastEdit datetime DEFAULT NULL,
  affiliatedId int(10) unsigned NOT NULL DEFAULT 0,
  active tinyint(1) unsigned NOT NULL DEFAULT 1,
  ban tinyint(1) unsigned NOT NULL DEFAULT 0,
  PRIMARY KEY (profileId),
  UNIQUE KEY bankAccount (bankAccount), -- For the Security Bank Account --
  UNIQUE KEY username (username),
  UNIQUE KEY email (email),
  KEY birthDate (birthDate)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- 1:1 extension of ph7_affiliates: business/contact details (shares profileId PK).
-- NOTE(review): AUTO_INCREMENT=1 is a no-op here — no auto-increment column.
CREATE TABLE IF NOT EXISTS ph7_affiliates_info (
profileId int(10) unsigned NOT NULL,
middleName varchar(50) DEFAULT NULL,
businessName varchar(100) DEFAULT NULL,
taxId varchar(40) DEFAULT NULL, -- Tax ID, VAT, SSN, ...
address varchar(191) DEFAULT NULL,
country char(2) DEFAULT NULL,
city varchar(150) DEFAULT NULL,
state varchar(150) DEFAULT NULL,
zipCode varchar(20) DEFAULT NULL,
phone varchar(100) DEFAULT NULL,
description text DEFAULT NULL,
website varchar(120) DEFAULT NULL,
PRIMARY KEY (profileId),
KEY country (country),
FOREIGN KEY (profileId) REFERENCES ph7_affiliates(profileId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Temporarily banned IP addresses. The IP itself is the primary key;
-- the secondary KEY on ipId satisfies MySQL's AUTO_INCREMENT index requirement.
CREATE TABLE IF NOT EXISTS ph7_block_ip (
ipId smallint(5) unsigned NOT NULL AUTO_INCREMENT,
ip varchar(45) NOT NULL,
expiration smallint(5) unsigned NOT NULL,
PRIMARY KEY (ip),
KEY ipId (ipId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 ROW_FORMAT=DYNAMIC;
-- Site advertisement slots: raw HTML snippet plus banner dimensions and
-- view/click counters. Seed rows below are disabled ('0') placeholder banners.
CREATE TABLE IF NOT EXISTS ph7_ads (
adsId smallint(4) unsigned NOT NULL AUTO_INCREMENT,
name varchar(40) DEFAULT NULL,
code text,
active enum('1','0') DEFAULT '1',
width smallint(3) unsigned DEFAULT NULL,
height smallint(3) unsigned DEFAULT NULL,
views int(10) unsigned NOT NULL DEFAULT 0,
clicks int(10) unsigned NOT NULL DEFAULT 0,
PRIMARY KEY (adsId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Placeholder banners in the common IAB sizes; %site_name%/%software_name%
-- are template tokens replaced at render time (presumably — confirm in app code).
INSERT INTO ph7_ads (adsId, name, code, active, width, height, views, clicks) VALUES
(1, 'Sponsor pH7CMS 1 (728x90)', '<a href="#0"><img data-src="holder.js/728x90" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 728, 90, 0, 0),
(2, 'Sponsor pH7CMS 2 (728x90)', '<a href="#0"><img data-src="holder.js/728x90" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 728, 90, 0, 0),
(3, 'Sponsor pH7CMS 3 (200x200)', '<a href="#0"><img data-src="holder.js/200x200" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 200, 200, 0, 0),
(4, 'Sponsor pH7CMS 4 (200x200)', '<a href="#0"><img data-src="holder.js/200x200" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 200, 200, 0, 0),
(5, 'Sponsor pH7CMS 5 (250x250)', '<a href="#0"><img data-src="holder.js/250x250" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 250, 250, 0, 0),
(6, 'Sponsor pH7CMS 6 (250x250)', '<a href="#0"><img data-src="holder.js/250x250" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 250, 250, 0, 0),
(7, 'Sponsor pH7CMS 7 (468x60)', '<a href="#0"><img data-src="holder.js/468x60" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 468, 60, 0, 0),
(8, 'Sponsor pH7CMS 8 (468x60)', '<a href="#0"><img data-src="holder.js/468x60" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 468, 60, 0, 0),
(9, 'Sponsor pH7CMS 9 (300x250)', '<a href="#0"><img data-src="holder.js/300x250" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 300, 250, 0, 0),
(10, 'Sponsor pH7CMS 10 (300x250)', '<a href="#0"><img data-src="holder.js/300x250" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 300, 250, 0, 0),
(11, 'Sponsor pH7CMS 11 (336x280)', '<a href="#0"><img data-src="holder.js/336x280" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 336, 280, 0, 0),
(12, 'Sponsor pH7CMS 12 (336x280)', '<a href="#0"><img data-src="holder.js/336x280" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 336, 280, 0, 0),
(13, 'Sponsor pH7CMS 13 (120x600)', '<a href="#0"><img data-src="holder.js/120x600" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 120, 600, 0, 0),
(14, 'Sponsor pH7CMS 14 (120x600)', '<a href="#0"><img data-src="holder.js/120x600" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 120, 600, 0, 0),
(15, 'Sponsor pH7CMS 15 (160x600)', '<a href="#0"><img data-src="holder.js/160x600" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 160, 600, 0, 0),
(16, 'Sponsor pH7CMS 16 (160x600)', '<a href="#0"><img data-src="holder.js/160x600" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 160, 600, 0, 0);
-- Banners affiliates can embed on their own sites (same shape as ph7_ads,
-- minus the view/click counters). Seed rows are disabled placeholders.
CREATE TABLE IF NOT EXISTS ph7_ads_affiliates (
adsId smallint(4) unsigned NOT NULL AUTO_INCREMENT,
name varchar(40) DEFAULT NULL,
code text,
active enum('1','0') DEFAULT '1',
width smallint(3) unsigned DEFAULT NULL,
height smallint(3) unsigned DEFAULT NULL,
PRIMARY KEY (adsId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- %affiliate_url% is a template token; odd rows link the home page,
-- even rows link the signup page.
INSERT INTO ph7_ads_affiliates (adsId, name, code, active, width, height) VALUES
(1, 'Affiliate Banner 1 (728x90)', '<a href="%affiliate_url%"><img data-src="holder.js/728x90" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 728, 90),
(2, 'Affiliate Banner 2 (728x90)', '<a href="%affiliate_url%/signup"><img data-src="holder.js/728x90" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 728, 90),
(3, 'Affiliate Banner 3 (200x200)', '<a href="%affiliate_url%"><img data-src="holder.js/200x200" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 200, 200),
(4, 'Affiliate Banner 4 (200x200)', '<a href="%affiliate_url%/signup"><img data-src="holder.js/200x200" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 200, 200),
(5, 'Affiliate Banner 5 (250x250)', '<a href="%affiliate_url%"><img data-src="holder.js/250x250" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 250, 250),
(6, 'Affiliate Banner 6 (250x250)', '<a href="%affiliate_url%/signup"><img data-src="holder.js/250x250" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 250, 250),
(7, 'Affiliate Banner 7 (468x60)', '<a href="%affiliate_url%"><img data-src="holder.js/468x60" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 468, 60),
(8, 'Affiliate Banner 8 (468x60)', '<a href="%affiliate_url%/signup"><img data-src="holder.js/468x60" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 468, 60),
(9, 'Affiliate Banner 9 (300x250)', '<a href="%affiliate_url%"><img data-src="holder.js/300x250" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 300, 250),
(10, 'Affiliate Banner 10 (300x250)', '<a href="%affiliate_url%/signup"><img data-src="holder.js/300x250" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 300, 250),
(11, 'Affiliate Banner 11 (336x280)', '<a href="%affiliate_url%"><img data-src="holder.js/336x280" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 336, 280),
(12, 'Affiliate Banner 12 (336x280)', '<a href="%affiliate_url%/signup"><img data-src="holder.js/336x280" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 336, 280),
(13, 'Affiliate Banner 13 (120x600)', '<a href="%affiliate_url%"><img data-src="holder.js/120x600" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 120, 600),
(14, 'Affiliate Banner 14 (120x600)', '<a href="%affiliate_url%/signup"><img data-src="holder.js/120x600" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 120, 600),
(15, 'Affiliate Banner 15 (160x600)', '<a href="%affiliate_url%"><img data-src="holder.js/160x600" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 160, 600),
(16, 'Affiliate Banner 16 (160x600)', '<a href="%affiliate_url%/signup"><img data-src="holder.js/160x600" alt="%site_name% by %software_name%" title="%site_name% powered by %software_name%" /></a>', '0', 160, 600);
-- Photo albums owned by members; individual photos live in ph7_pictures.
CREATE TABLE IF NOT EXISTS ph7_albums_pictures (
albumId int(10) unsigned NOT NULL AUTO_INCREMENT,
profileId int(10) unsigned NOT NULL,
name varchar(80) NOT NULL,
thumb char(11) NOT NULL, -- e.g. 2-thumb.jpg
approved enum('1','0') DEFAULT '1',
votes int(9) unsigned DEFAULT 0,
score float(9) unsigned DEFAULT 0,
views int(10) unsigned DEFAULT 0,
description varchar(191) DEFAULT NULL,
createdDate datetime NULL,
updatedDate datetime DEFAULT NULL,
PRIMARY KEY (albumId),
FOREIGN KEY (profileId) REFERENCES ph7_members(profileId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Video albums owned by members; individual videos live in ph7_videos.
-- Mirrors the structure of ph7_albums_pictures.
CREATE TABLE IF NOT EXISTS ph7_albums_videos (
albumId int(10) unsigned NOT NULL AUTO_INCREMENT,
profileId int(10) unsigned NOT NULL,
name varchar(80) NOT NULL,
thumb char(11) NOT NULL, -- e.g. 5-thumb.jpg
approved enum('1','0') DEFAULT '1',
votes int(9) unsigned DEFAULT 0,
score float(9) unsigned DEFAULT 0,
views int(10) unsigned DEFAULT 0,
description varchar(191) DEFAULT NULL,
createdDate datetime NULL,
updatedDate datetime DEFAULT NULL,
PRIMARY KEY (albumId),
FOREIGN KEY (profileId) REFERENCES ph7_members(profileId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Individual photos; each belongs to one album and one member.
CREATE TABLE IF NOT EXISTS ph7_pictures (
pictureId int(10) unsigned NOT NULL AUTO_INCREMENT,
profileId int(10) unsigned NOT NULL,
albumId int(10) unsigned NOT NULL,
title varchar(80) NOT NULL,
description varchar(191) DEFAULT NULL,
file varchar(40) NOT NULL,
approved enum('1','0') DEFAULT '1',
votes int(9) unsigned DEFAULT 0,
score float(9) unsigned DEFAULT 0,
views int(10) unsigned DEFAULT 0,
createdDate datetime NULL,
updatedDate datetime DEFAULT NULL,
PRIMARY KEY (pictureId),
FOREIGN KEY (albumId) REFERENCES ph7_albums_pictures(albumId),
FOREIGN KEY (profileId) REFERENCES ph7_members(profileId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Individual videos; file/thumb may be remote URLs or local paths.
-- NOTE(review): duration unit is not stated here — presumably seconds; confirm in app code.
CREATE TABLE IF NOT EXISTS ph7_videos (
videoId int(10) unsigned NOT NULL AUTO_INCREMENT,
profileId int(10) unsigned NOT NULL,
albumId int(10) unsigned NOT NULL,
title varchar(80) DEFAULT NULL,
description varchar(191) DEFAULT NULL,
file varchar(191) DEFAULT NULL, -- e.g. http://youtu.be/4fplAZfO9KY or local file server.
thumb varchar(191) DEFAULT NULL, -- e.g. http://img.youtube.com/vi/4fplAZfO9KY/default.jpg or local file server.
approved enum('1','0') NOT NULL DEFAULT '1',
votes int(9) unsigned DEFAULT 0,
score float(9) unsigned DEFAULT 0,
views int(10) unsigned DEFAULT 0,
createdDate datetime NULL,
updatedDate datetime DEFAULT NULL,
duration int(9) NOT NULL,
PRIMARY KEY (videoId),
FOREIGN KEY (albumId) REFERENCES ph7_albums_videos(albumId),
FOREIGN KEY (profileId) REFERENCES ph7_members(profileId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Raw analytics snippets (e.g. tracking JS) injected into pages when active.
CREATE TABLE IF NOT EXISTS ph7_analytics_api (
analyticsId tinyint(3) unsigned NOT NULL AUTO_INCREMENT,
name varchar(32) DEFAULT NULL,
code text,
active enum('1','0') DEFAULT '1',
PRIMARY KEY (analyticsId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Seed an empty, active slot for the admin to fill in.
INSERT INTO ph7_analytics_api (analyticsId, name, code, active) VALUES
(1, 'Analytics Code', '', '1');
-- Company blog posts, with per-post SEO metadata.
-- postId is the unique URL slug; langId is the post's language code.
CREATE TABLE IF NOT EXISTS ph7_blogs (
blogId mediumint(10) unsigned NOT NULL AUTO_INCREMENT,
postId varchar(60) NOT NULL,
langId char(2) NOT NULL DEFAULT '',
title varchar(100) DEFAULT NULL,
content longtext NOT NULL,
pageTitle varchar(100) NOT NULL,
metaDescription varchar(191) NOT NULL,
metaKeywords varchar(191) NOT NULL,
slogan varchar(191) NOT NULL,
metaRobots varchar(50) NOT NULL,
metaAuthor varchar(50) NOT NULL,
metaCopyright varchar(50) NOT NULL,
tags varchar(191) DEFAULT NULL,
votes int(9) unsigned DEFAULT 0,
score float(9) unsigned DEFAULT 0,
views int(10) unsigned DEFAULT 0,
enableComment enum('1','0') DEFAULT '1',
createdDate datetime NULL,
updatedDate datetime DEFAULT NULL,
PRIMARY KEY (blogId),
UNIQUE KEY postId (postId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Junction table: blog post <-> category (many-to-many).
-- NOTE(review): categoryId has no FK to ph7_blogs_data_categories — presumably
-- enforced at the application layer; confirm before relying on referential integrity.
CREATE TABLE IF NOT EXISTS ph7_blogs_categories (
categoryId smallint(4) unsigned NOT NULL,
blogId mediumint(10) unsigned NOT NULL,
INDEX (categoryId),
INDEX (blogId),
FOREIGN KEY (blogId) REFERENCES ph7_blogs(blogId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-- Lookup table of blog category names.
CREATE TABLE IF NOT EXISTS ph7_blogs_data_categories (
categoryId smallint(4) unsigned NOT NULL AUTO_INCREMENT,
name varchar(40) DEFAULT NULL,
PRIMARY KEY (categoryId),
UNIQUE KEY (name)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Default blog categories.
INSERT INTO ph7_blogs_data_categories (categoryId, name) VALUES
(1, 'Affiliate'),
(2, 'Business'),
(3, 'Company'),
(4, 'Dating'),
(5, 'Education'),
(6, 'Family'),
(7, 'Food'),
(8, 'Game'),
(9, 'Health'),
(10, 'Hobby'),
(11, 'Movie'),
(12, 'Music'),
(13, 'News'),
(14, 'Programming'),
(15, 'Recreation'),
(16, 'Shopping'),
(17, 'Society'),
(18, 'Sports'),
(19, 'Technology'),
(20, 'Travel');
-- Member-authored notes (per-user blog posts), same SEO fields as ph7_blogs
-- plus an owner (profileId) and moderation flag (approved).
CREATE TABLE IF NOT EXISTS ph7_notes (
noteId int(10) unsigned NOT NULL AUTO_INCREMENT,
profileId int(10) unsigned NOT NULL,
postId varchar(60) NOT NULL,
langId char(2) NOT NULL DEFAULT '',
title varchar(100) DEFAULT NULL,
content longtext NOT NULL,
pageTitle varchar(100) NOT NULL,
metaDescription varchar(191) NOT NULL,
metaKeywords varchar(191) NOT NULL,
slogan varchar(191) NOT NULL,
metaRobots varchar(50) NOT NULL,
metaAuthor varchar(50) NOT NULL,
metaCopyright varchar(50) NOT NULL,
tags varchar(191) DEFAULT NULL,
thumb char(24) DEFAULT NULL,
votes int(9) unsigned DEFAULT 0,
score float(9) unsigned DEFAULT 0,
views int(10) unsigned DEFAULT 0,
enableComment enum('1','0') DEFAULT '1',
createdDate datetime NULL,
updatedDate datetime DEFAULT NULL,
approved tinyint(1) unsigned NOT NULL DEFAULT 1,
PRIMARY KEY (noteId),
UNIQUE KEY postId (postId),
FOREIGN KEY (profileId) REFERENCES ph7_members(profileId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Junction table: note <-> category (many-to-many), also tracking the note's owner.
CREATE TABLE IF NOT EXISTS ph7_notes_categories (
categoryId smallint(4) unsigned NOT NULL,
noteId int(10) unsigned NOT NULL,
profileId int(10) unsigned NOT NULL,
INDEX (categoryId),
INDEX (noteId),
FOREIGN KEY (noteId) REFERENCES ph7_notes(noteId),
FOREIGN KEY (profileId) REFERENCES ph7_members(profileId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Lookup table of note category names.
CREATE TABLE IF NOT EXISTS ph7_notes_data_categories (
categoryId smallint(4) unsigned NOT NULL AUTO_INCREMENT,
name varchar(40) DEFAULT NULL,
PRIMARY KEY (categoryId),
UNIQUE KEY (name)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Default note categories.
INSERT INTO ph7_notes_data_categories (categoryId, name) VALUES
(1, 'Business'),
(2, 'Companies'),
(3, 'Dating'),
(4, 'Education'),
(5, 'Family'),
(6, 'Food'),
(7, 'Game'),
(8, 'Health'),
(9, 'Hobby'),
(10, 'Movie'),
(11, 'Music'),
(12, 'News'),
(13, 'Pets'),
(14, 'Recreation'),
(15, 'Shopping'),
(16, 'Society'),
(17, 'Sports'),
(18, 'Study'),
(19, 'Technology'),
(20, 'Travel');
-- Comment tables: one per commentable content type, all with the same shape.
-- sender = commenting member's profileId; recipient = the commented item's PK.
-- sender deliberately has no FK in every table below, so comments can survive
-- member deletion and admins can comment too.
CREATE TABLE IF NOT EXISTS ph7_comments_blog (
commentId int(10) unsigned NOT NULL AUTO_INCREMENT,
sender int(10) unsigned NOT NULL,
recipient mediumint(10) unsigned NOT NULL,
comment text NOT NULL,
createdDate datetime NULL,
updatedDate datetime DEFAULT NULL,
approved enum('1','0') NOT NULL DEFAULT '1',
PRIMARY KEY (commentId),
-- No sender FK: comments may be kept after member deletion, and admins may also comment.
-- FOREIGN KEY (sender) REFERENCES ph7_members(profileId),
FOREIGN KEY (recipient) REFERENCES ph7_blogs(blogId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Comments on member notes.
CREATE TABLE IF NOT EXISTS ph7_comments_note (
commentId int(10) unsigned NOT NULL AUTO_INCREMENT,
sender int(10) unsigned NOT NULL,
recipient int(10) unsigned NOT NULL,
comment text NOT NULL,
createdDate datetime NULL,
updatedDate datetime DEFAULT NULL,
approved enum('1','0') NOT NULL DEFAULT '1',
PRIMARY KEY (commentId),
-- No sender FK: comments may be kept after member deletion.
-- FOREIGN KEY (sender) ph7_members(profileId),
FOREIGN KEY (recipient) REFERENCES ph7_notes(noteId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Comments on pictures.
CREATE TABLE IF NOT EXISTS ph7_comments_picture (
commentId int(10) unsigned NOT NULL AUTO_INCREMENT,
sender int(10) unsigned NOT NULL,
recipient int(10) unsigned NOT NULL,
comment text NOT NULL,
createdDate datetime NULL,
updatedDate datetime DEFAULT NULL,
approved enum('1','0') NOT NULL DEFAULT '1',
PRIMARY KEY (commentId),
-- No sender FK: comments may be kept after member deletion.
-- FOREIGN KEY (sender) ph7_members(profileId),
FOREIGN KEY (recipient) REFERENCES ph7_pictures(pictureId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Comments on videos.
CREATE TABLE IF NOT EXISTS ph7_comments_video (
commentId int(10) unsigned NOT NULL AUTO_INCREMENT,
sender int(10) unsigned NOT NULL,
recipient int(10) unsigned NOT NULL,
comment text NOT NULL,
createdDate datetime NULL,
updatedDate datetime DEFAULT NULL,
approved enum('1','0') NOT NULL DEFAULT '1',
PRIMARY KEY (commentId),
-- No sender FK: comments may be kept after member deletion.
-- FOREIGN KEY (sender) ph7_members(profileId),
FOREIGN KEY (recipient) REFERENCES ph7_videos(videoId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Comments on games.
CREATE TABLE IF NOT EXISTS ph7_comments_game (
commentId int(10) unsigned NOT NULL AUTO_INCREMENT,
sender int(10) unsigned NOT NULL,
recipient int(10) unsigned NOT NULL,
comment text NOT NULL,
createdDate datetime NULL,
updatedDate datetime DEFAULT NULL,
approved enum('1','0') NOT NULL DEFAULT '1',
PRIMARY KEY (commentId),
-- No sender FK: comments may be kept after member deletion.
-- FOREIGN KEY (sender) ph7_members(profileId),
FOREIGN KEY (recipient) REFERENCES ph7_games(gameId) -- Warning: You must first download the file "pH7_Game.sql" for this table can be inserted because it uses a foreign key.
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Comments on member profiles (recipient is the profile being commented on).
CREATE TABLE IF NOT EXISTS ph7_comments_profile (
commentId int(10) unsigned NOT NULL AUTO_INCREMENT,
sender int(10) unsigned NOT NULL,
recipient int(10) unsigned NOT NULL,
comment text NOT NULL,
createdDate datetime NULL,
updatedDate datetime DEFAULT NULL,
approved enum('1','0') DEFAULT '1',
PRIMARY KEY (commentId),
-- No sender FK: comments may be kept after member deletion.
-- FOREIGN KEY (sender) ph7_members(profileId),
FOREIGN KEY (recipient) REFERENCES ph7_members(profileId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Forum category lookup table.
CREATE TABLE IF NOT EXISTS ph7_forums_categories (
categoryId smallint(4) unsigned NOT NULL AUTO_INCREMENT,
title varchar(60) DEFAULT NULL,
PRIMARY KEY (categoryId),
UNIQUE KEY (title)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Default forum categories.
INSERT INTO ph7_forums_categories (categoryId, title) VALUES
(1, 'General'),
(2, 'Free Online Dating Site'),
(3, 'Business');
-- Forums, each optionally assigned to a category.
CREATE TABLE IF NOT EXISTS ph7_forums (
forumId mediumint(10) unsigned NOT NULL AUTO_INCREMENT,
name varchar(80) NOT NULL DEFAULT 'New forum',
description varchar(191) NOT NULL,
categoryId smallint(4) unsigned DEFAULT NULL,
createdDate datetime NULL,
updatedDate datetime DEFAULT NULL,
PRIMARY KEY (forumId),
FOREIGN KEY (categoryId) REFERENCES ph7_forums_categories(categoryId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Default forums.
INSERT INTO ph7_forums (forumId, name, description, categoryId) VALUES
(1, 'Hello', 'Free dating site', 1),
(2, 'Online Dating', 'Discussion about the online dating websites', 2),
(3, 'The Best Dating Site', 'The best dating site', 1);
-- Forum topics (the opening post of each thread).
-- profileId deliberately has no FK so topics can outlive a deleted member.
CREATE TABLE IF NOT EXISTS ph7_forums_topics (
topicId int(10) unsigned NOT NULL AUTO_INCREMENT,
forumId mediumint(10) unsigned DEFAULT NULL,
profileId int(10) unsigned NOT NULL,
title varchar(100) NOT NULL,
message text NOT NULL,
approved enum('1','0') DEFAULT '1',
createdDate datetime NULL,
updatedDate datetime DEFAULT NULL,
views int(11) unsigned NOT NULL DEFAULT 0,
-- Topics may be kept even after the member is deleted, hence no FK:
-- FOREIGN KEY (profileId) ph7_members(profileId),
FOREIGN KEY (forumId) REFERENCES ph7_forums(forumId),
PRIMARY KEY (topicId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Replies within a topic; same rationale for the missing profileId FK.
CREATE TABLE IF NOT EXISTS ph7_forums_messages (
messageId int(10) unsigned NOT NULL AUTO_INCREMENT,
topicId int(10) unsigned NOT NULL,
profileId int(10) unsigned NOT NULL,
message text NOT NULL,
approved enum('1','0') DEFAULT '1',
createdDate datetime NULL,
updatedDate datetime DEFAULT NULL,
-- Replies may be kept even after the member is deleted, hence no FK:
-- FOREIGN KEY (profileId) ph7_members(profileId),
FOREIGN KEY (topicId) REFERENCES ph7_forums_topics(topicId),
PRIMARY KEY (messageId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Installed UI language packs; langId is a locale code such as 'en_US'.
-- direction distinguishes left-to-right vs right-to-left scripts.
CREATE TABLE IF NOT EXISTS ph7_languages_info (
langId varchar(5) NOT NULL,
name varchar(60) NOT NULL,
charset varchar(15) NOT NULL,
active enum('0','1') NOT NULL DEFAULT '0',
direction enum('ltr','rtl') NOT NULL DEFAULT 'ltr',
author varchar(60) NOT NULL,
website varchar(120) DEFAULT NULL,
email varchar(120) DEFAULT NULL,
PRIMARY KEY (langId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-- English ships enabled by default.
INSERT INTO ph7_languages_info (langId, name, charset, active, direction, author, website, email) VALUES
('en_US', 'English', 'UTF-8', '1', 'ltr', 'Pierre-Henry Soria', 'http://ph7.me', 'hi@ph7.me');
-- "Like" counters keyed by an opaque content key; lastVote/lastIp support
-- basic duplicate-vote throttling (presumably — confirm in app code).
CREATE TABLE IF NOT EXISTS ph7_likes (
keyId varchar(191) NOT NULL,
votes int(10) unsigned NOT NULL,
lastVote datetime NOT NULL,
lastIp varchar(45) NOT NULL,
UNIQUE KEY keyId (keyId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-- Application error log, full-text searchable.
CREATE TABLE IF NOT EXISTS ph7_log_error (
logId mediumint(10) unsigned NOT NULL AUTO_INCREMENT,
logError longtext,
PRIMARY KEY (logId),
FULLTEXT KEY logError (logError) -- FULLTEXT is not supported by InnoDB in MySQL < 5.6.4, so set MyISAM engine
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Failed-login counters per IP, one table per account realm
-- (admins / members / affiliates); identical structure.
CREATE TABLE IF NOT EXISTS ph7_admins_attempts_login (
attemptsId int(10) unsigned NOT NULL AUTO_INCREMENT,
ip varchar(45) NOT NULL DEFAULT '',
attempts smallint(5) unsigned NOT NULL ,
lastLogin DATETIME NOT NULL,
PRIMARY KEY (attemptsId),
UNIQUE KEY (ip)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
CREATE TABLE IF NOT EXISTS ph7_members_attempts_login (
attemptsId int(10) unsigned NOT NULL AUTO_INCREMENT,
ip varchar(45) NOT NULL DEFAULT '',
attempts smallint(5) unsigned NOT NULL ,
lastLogin DATETIME NOT NULL,
PRIMARY KEY (attemptsId),
UNIQUE KEY (ip)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
CREATE TABLE IF NOT EXISTS ph7_affiliates_attempts_login (
attemptsId int(10) unsigned NOT NULL AUTO_INCREMENT,
ip varchar(45) NOT NULL DEFAULT '',
attempts smallint(5) unsigned NOT NULL ,
lastLogin DATETIME NOT NULL,
PRIMARY KEY (attemptsId),
UNIQUE KEY (ip)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-- Login audit logs, one table per account realm; identical structure.
-- dateTime auto-updates on row modification (ON UPDATE CURRENT_TIMESTAMP).
CREATE TABLE IF NOT EXISTS ph7_admins_log_login (
logId mediumint(10) unsigned NOT NULL AUTO_INCREMENT,
email varchar(120) NOT NULL DEFAULT '',
username varchar(64) NOT NULL DEFAULT '',
password varchar(40) DEFAULT NULL,
status varchar(60) NOT NULL DEFAULT '',
ip varchar(45) NOT NULL DEFAULT '',
dateTime timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (logId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
CREATE TABLE IF NOT EXISTS ph7_members_log_login (
logId mediumint(10) unsigned NOT NULL AUTO_INCREMENT,
email varchar(120) NOT NULL DEFAULT '',
username varchar(64) NOT NULL DEFAULT '',
password varchar(40) DEFAULT NULL,
status varchar(60) NOT NULL DEFAULT '',
ip varchar(45) NOT NULL DEFAULT '',
dateTime timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (logId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
CREATE TABLE IF NOT EXISTS ph7_affiliates_log_login (
logId mediumint(10) unsigned NOT NULL AUTO_INCREMENT,
email varchar(120) NOT NULL DEFAULT '',
username varchar(64) NOT NULL DEFAULT '',
password varchar(40) DEFAULT NULL,
status varchar(60) NOT NULL DEFAULT '',
ip varchar(45) NOT NULL DEFAULT '',
dateTime timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (logId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Session audit logs, one table per account realm; identical structure apart
-- from the profileId type/FK matching each realm's account table.
CREATE TABLE IF NOT EXISTS ph7_admins_log_sess (
sessionId int(10) unsigned NOT NULL AUTO_INCREMENT,
profileId tinyint(3) unsigned NOT NULL,
username varchar(40) DEFAULT NULL,
email varchar(120) DEFAULT NULL,
firstName varchar(50) DEFAULT NULL,
lastName varchar(50) DEFAULT NULL,
ip varchar(45) NOT NULL DEFAULT '127.0.0.1',
dateTime timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (sessionId),
FOREIGN KEY (profileId) REFERENCES ph7_admins(profileId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
CREATE TABLE IF NOT EXISTS ph7_members_log_sess (
sessionId int(10) unsigned NOT NULL AUTO_INCREMENT,
profileId int(10) unsigned NOT NULL,
username varchar(40) DEFAULT NULL,
email varchar(120) DEFAULT NULL,
firstName varchar(50) DEFAULT NULL,
lastName varchar(50) DEFAULT NULL,
ip varchar(45) NOT NULL DEFAULT '127.0.0.1',
dateTime timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (sessionId),
FOREIGN KEY (profileId) REFERENCES ph7_members(profileId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
CREATE TABLE IF NOT EXISTS ph7_affiliates_log_sess (
sessionId int(10) unsigned NOT NULL AUTO_INCREMENT,
profileId int(10) unsigned NOT NULL,
username varchar(40) DEFAULT NULL,
email varchar(120) DEFAULT NULL,
firstName varchar(50) DEFAULT NULL,
lastName varchar(50) DEFAULT NULL,
ip varchar(45) NOT NULL DEFAULT '127.0.0.1',
dateTime timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (sessionId),
FOREIGN KEY (profileId) REFERENCES ph7_affiliates(profileId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-- Per-member profile background image (one row per member; approved = moderation flag).
CREATE TABLE IF NOT EXISTS ph7_members_background (
profileId int(10) unsigned NOT NULL,
file varchar(5) NOT NULL,
approved tinyint(1) unsigned NOT NULL DEFAULT 1,
PRIMARY KEY profileId (profileId),
FOREIGN KEY (profileId) REFERENCES ph7_members(profileId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-- "Who viewed my profile": visitorId viewed profileId at lastVisit.
CREATE TABLE IF NOT EXISTS ph7_members_who_views (
profileId int(10) unsigned NOT NULL,
visitorId int(10) unsigned NOT NULL,
lastVisit datetime NULL,
INDEX profileId (profileId),
INDEX visitorId (visitorId),
FOREIGN KEY (profileId) REFERENCES ph7_members(profileId),
FOREIGN KEY (visitorId) REFERENCES ph7_members(profileId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-- Friendship edges between members; pending = 1 while the request is unanswered.
CREATE TABLE IF NOT EXISTS ph7_members_friends (
profileId int(10) unsigned NOT NULL,
friendId int(10) unsigned NOT NULL,
requestDate datetime DEFAULT NULL,
pending tinyint(1) unsigned NOT NULL DEFAULT 0,
INDEX profileId (profileId),
INDEX friendId (friendId),
FOREIGN KEY (profileId) REFERENCES ph7_members(profileId),
FOREIGN KEY (friendId) REFERENCES ph7_members(profileId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-- Wall posts shown on a member's profile page.
-- FIX: the post column previously declared CHARACTER SET armscii8 (Armenian
-- ARMSCII-8), overriding the table's utf8mb4 default; any wall post containing
-- characters outside that 8-bit set would be rejected or mangled. Dropping the
-- per-column charset lets the column inherit DEFAULT CHARSET=utf8mb4, matching
-- every other text column in this schema.
CREATE TABLE IF NOT EXISTS ph7_members_wall (
wallId int(10) unsigned NOT NULL AUTO_INCREMENT,
profileId int(10) unsigned NOT NULL DEFAULT 0,
post text,
createdDate datetime NULL,
updatedDate datetime DEFAULT NULL,
PRIMARY KEY (wallId),
FOREIGN KEY (profileId) REFERENCES ph7_members(profileId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Private mail between accounts. status: 1 = Unread, 0 = Read.
-- trash/toDelete are SETs recording which side trashed / hard-deleted the message.
CREATE TABLE IF NOT EXISTS ph7_messages (
messageId int(10) unsigned NOT NULL AUTO_INCREMENT,
sender int(10) unsigned NOT NULL DEFAULT 0,
recipient int(10) unsigned NOT NULL DEFAULT 0,
title varchar(30) NOT NULL DEFAULT '',
message text NOT NULL,
sendDate datetime NULL,
status tinyint(1) unsigned NOT NULL DEFAULT 1, -- 1 = Unread | 0 = Read
trash set('sender','recipient') NOT NULL DEFAULT '',
toDelete set('sender','recipient') NOT NULL DEFAULT '',
PRIMARY KEY (messageId),
-- No sender FK: administrators (outside ph7_members) can also send mail.
-- FOREIGN KEY (sender) REFERENCES ph7_members(profileId),
FOREIGN KEY (recipient) REFERENCES ph7_members(profileId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Instant-messenger history keyed by username; recd presumably marks
-- delivery/read state — confirm against the IM module.
CREATE TABLE IF NOT EXISTS ph7_messenger (
messengerId int(10) unsigned NOT NULL AUTO_INCREMENT,
fromUser varchar(40) NOT NULL DEFAULT '',
toUser varchar(40) NOT NULL DEFAULT '',
message text NOT NULL,
sent datetime NULL,
recd int(10) unsigned NOT NULL DEFAULT 0,
PRIMARY KEY (messengerId),
FOREIGN KEY (fromUser) REFERENCES ph7_members(username),
FOREIGN KEY (toUser) REFERENCES ph7_members(username)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Site-wide SEO/meta content, one row per language (langId is the PK).
CREATE TABLE IF NOT EXISTS ph7_meta_main (
langId varchar(5) NOT NULL DEFAULT '',
pageTitle varchar(100) NOT NULL,
metaDescription varchar(191) NOT NULL,
metaKeywords varchar(191) NOT NULL,
headline varchar(50) NOT NULL,
slogan varchar(191) NOT NULL,
promoText text DEFAULT NULL,
metaRobots varchar(50) NOT NULL DEFAULT '',
metaAuthor varchar(50) NOT NULL DEFAULT '',
metaCopyright varchar(55) NOT NULL DEFAULT '',
metaRating varchar(50) NOT NULL DEFAULT '',
metaDistribution varchar(50) NOT NULL DEFAULT '',
metaCategory varchar(50) NOT NULL DEFAULT '',
PRIMARY KEY (langId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-- Default English meta content.
INSERT INTO ph7_meta_main (langId, pageTitle, metaDescription, metaKeywords, headline, slogan, promoText, metaRobots, metaAuthor, metaCopyright, metaRating, metaDistribution, metaCategory) VALUES
('en_US', 'Home', 'The Best Online Social Dating Service to meet people and keep in touch with your friends', 'meet people, community, single, friends, meet singles, women, men, dating site, dating service, dating website, online dating website', 'Be on the right place!', 'The place to meet lovely people!', 'You''re on the best place for meeting new people nearby! Chat, Flirt, Socialize and have Fun!<br />Create any Social Dating Web Apps or Websites like this one with the #1 <a href="https://ph7cms.com">Dating Web App Builder</a>. It''s Professional, Modern, Open Source, and gives you the Best Way to launch a new Social/Dating Business!', 'index, follow, all', 'Pierre-Henry Soria (pH7CMS.com)', 'Copyright Pierre-Henry Soria. All Rights Reserved.', 'general', 'global', 'dating');
-- Registry of bundled system modules and whether each is enabled.
-- folderName is the module's directory name on disk.
CREATE TABLE IF NOT EXISTS ph7_sys_mods_enabled (
moduleId tinyint(2) unsigned NOT NULL AUTO_INCREMENT,
moduleTitle varchar(50) NOT NULL,
folderName varchar(20) NOT NULL,
premiumMod enum('0','1') NOT NULL DEFAULT '0',
enabled enum('0','1') NOT NULL DEFAULT '1',
PRIMARY KEY (moduleId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Default module set; moduleId assigned by AUTO_INCREMENT in listed order.
INSERT INTO ph7_sys_mods_enabled (moduleTitle, folderName, premiumMod, enabled) VALUES
('Affiliate', 'affiliate', '0', '1'),
('Chat', 'chat', '1', '1'),
('Chatroulette', 'chatroulette', '1', '1'),
('Photo', 'picture', '0', '1'),
('Video', 'video', '0', '1'),
('Hot or Not', 'hotornot', '0', '1'),
('Forum', 'forum', '0', '1'),
('Note (blog for users)', 'note', '0', '1'),
('Blog (company blog)', 'blog', '0', '1'),
('Love Calculator', 'love-calculator', '0', '1'),
('Mail (private message)', 'mail', '0', '1'),
('Instant Messaging (IM)', 'im', '0', '1'),
('Friends', 'friend', '0', '1'),
('Related Profiles', 'related-profile', '0', '1'),
('User Dashboard', 'user-dashboard', '0', '1'),
('Dating-Style Profile Page', 'cool-profile-page', '0', '1'),
('Birthday: Let''s Celebrate Birthdays', 'birthday', '0', '1'),
('Google Maps', 'map', '0', '1'),
('Game', 'game', '0', '0'),
('Newsletter', 'newsletter', '0', '1'),
('Invite Friends', 'invite', '0', '1'),
('SMS Verification', 'sms-verification', '0', '0'),
('Social Media Auth (connect)', 'connect', '0', '0'),
('Progressive Web App (HTTPS required)', 'pwa', '0', '0');
-- Registry of installed third-party/add-on modules (vendor + name + version).
CREATE TABLE IF NOT EXISTS ph7_modules (
  moduleId   SMALLINT(4) UNSIGNED NOT NULL AUTO_INCREMENT,
  vendorName VARCHAR(40) NOT NULL,
  moduleName VARCHAR(40) NOT NULL,
  version    VARCHAR(6) NOT NULL,
  uri        VARCHAR(40) DEFAULT NULL,            -- route prefix, if any
  path       VARCHAR(191) DEFAULT NULL,           -- filesystem path, if any
  active     ENUM('0','1') NOT NULL DEFAULT '0',  -- '1' = module enabled
  PRIMARY KEY (moduleId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
/* Gives the current version of pH7CMS SQL schema (this helps to update and shows whether it is necessary or not to update the database as well) */
-- FIX: active is ENUM('0','1'). A bare integer here is interpreted by MySQL as
-- the 1-based enum INDEX (1 => '0'), silently storing the INACTIVE value.
-- The quoted string '1' stores the intended "active" flag.
INSERT INTO ph7_modules (vendorName, moduleName, version, active) VALUES
('pH7CMS', 'SQL System Schema', '1.6.0', '1');
-- Abuse reports filed by members against other members' content.
CREATE TABLE IF NOT EXISTS ph7_report (
  reportId    SMALLINT(4) UNSIGNED NOT NULL AUTO_INCREMENT,
  reporterId  INT(10) UNSIGNED DEFAULT NULL, -- member who filed the report
  spammerId   INT(10) UNSIGNED DEFAULT NULL, -- member being reported
  dateTime    DATETIME DEFAULT NULL,
  contentType ENUM('user', 'avatar', 'mail', 'comment', 'picture', 'video', 'forum', 'note') NOT NULL DEFAULT 'user',
  description VARCHAR(191) DEFAULT NULL,
  url         VARCHAR(191) DEFAULT NULL,     -- URL of the offending content
  PRIMARY KEY (reportId),
  FOREIGN KEY (reporterId) REFERENCES ph7_members(profileId),
  FOREIGN KEY (spammerId) REFERENCES ph7_members(profileId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Global key/value configuration store, grouped by settingGroup for the admin UI.
CREATE TABLE IF NOT EXISTS ph7_settings (
  settingName  VARCHAR(64) NOT NULL,
  settingValue VARCHAR(150) DEFAULT '',
  description  VARCHAR(120) DEFAULT '' COMMENT 'Informative desc about the setting',
  settingGroup VARCHAR(12) NOT NULL,
  PRIMARY KEY (settingName)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-- Seed the configuration store. settingValue is VARCHAR, so every value is
-- stored as a string; the @-variables are session variables SET earlier in
-- this installation script.
-- FIXES: captchaCaseSensitive description said "0 to enable" twice (typo for
-- "0 to disable"); smtpPassword placeholder is now a quoted string instead of
-- relying on implicit number-to-VARCHAR coercion.
INSERT INTO ph7_settings (settingName, settingValue, description, settingGroup) VALUES
('siteName', @sDefaultSiteName, '', 'general'),
('defaultLanguage', 'en_US', '', 'language'),
('defaultTemplate', @sDefaultTemplate, '', 'design'),
('navbarType', 'default', 'Choose between "default" or "dark"', 'design'),
('backgroundColor', '', 'Override background color. Leave empty to disable', 'design'),
('textColor', '', 'Override text color. Leave empty to disable', 'design'),
('heading1Color', '', 'Override H1 color. Leave empty to disable', 'design'),
('heading2Color', '', 'Override H2 color. Leave empty to disable', 'design'),
('heading3Color', '', 'Override H3 color. Leave empty to disable', 'design'),
('linkColor', '', 'Override links color. Leave empty to disable', 'design'),
('footerLinkColor', '', 'Override footer links color. Leave empty to disable', 'design'),
('linkHoverColor', '', 'Override links hover color. Leave empty to disable', 'design'),
('defaultSysModule', @sDefaultSysModule, 'The default module running by default on the index page. Recommended to keep the "user" module', 'general'),
('returnEmail', @sNoReplyEmail, 'Usually noreply@yoursite.com', 'email'),
('adminEmail', @sAdminEmail, '', 'email'),
('feedbackEmail', @sFeedbackEmail, '', 'email'),
('emailName', 'pH7CMS.com', '', 'email'),
('splashPage', 1, 'Use Splash Page | enable = 1 or disable = 0', 'homepage'),
('usersBlock', 1, '0 to disable | 1 to enable the profile photos on the homepage', 'homepage'),
('profileWithAvatarSet', 0, '1 to display only the profiles with a profile photo.', 'homepage'),
('bgSplashVideo', 1, '0 to disable or 1 to enable the background splash video', 'homepage'),
('numberProfileSplashPage', 44, 'Number of profiles to display on the splash homepage', 'homepage'),
('ipLogin', '', '', 'security'),
('timeDelayUserRegistration', 1440, '1440 minutes = 24 hours (in minutes!)', 'spam'),
('timeDelayAffRegistration', 2880, '2880 minutes = 2 days (in minutes!)', 'spam'),
('timeDelaySendNote', 20, 'Waiting time to add a new note post, in minutes!', 'spam'),
('timeDelaySendMail', 3, 'Waiting time to send a new message, in minutes!', 'spam'),
('timeDelaySendComment', 5, 'Waiting time to send a new comment, in minutes!', 'spam'),
('timeDelaySendForumTopic', 5, 'Waiting time to send a new topic in the forum, in minutes!', 'spam'),
('timeDelaySendForumMsg', 10, 'Waiting time to send a reply message in the same topic, in minutes!', 'spam'),
('captchaComplexity', 5, 'number of captcha complexity', 'spam'),
('captchaCaseSensitive', 1, '1 to enable captcha case sensitive | 0 to disable', 'spam'),
('isCaptchaUserSignup', 0, '0 to disable or 1 to enable', 'spam'),
('isCaptchaAffiliateSignup', 0, '0 to disable or 1 to enable', 'spam'),
('isCaptchaMail', 0, '0 to disable or 1 to enable', 'spam'),
('isCaptchaComment', 0, '0 to disable or 1 to enable', 'spam'),
('isCaptchaForum', 0, '0 to disable or 1 to enable', 'spam'),
('isCaptchaNote', 0, '0 to disable or 1 to enable', 'spam'),
('mapType', 'roadmap', 'Choose between: ''roadmap'', ''hybrid'', ''terrain'', ''satellite''', 'map'),
('isUserAgeRangeField', 1, '0 to disable or 1 to enable', 'registration'),
('maxAgeRegistration', 99, '', 'registration'),
('minAgeRegistration', 18, '', 'registration'),
('minUsernameLength', 3, '', 'registration'),
('maxUsernameLength', 30, '', 'registration'),
('requireRegistrationAvatar', 0, '', 'registration'),
('userActivationType', 1, '1 = no activation, 2 = email activation, 3 = manual activation by admin, 4 = SMS activation', 'registration'),
('affActivationType', 1, '1 = no activation, 2 = email activation, 3 = Manual activation by the administrator', 'registration'),
('defaultMembershipGroupId', @iUserRegularGroup, 'Default Membership Group', 'registration'),
('minPasswordLength', 6, '', 'security'),
('maxPasswordLength', 60, '', 'security'),
('isUserLoginAttempt', 1, 'Enable blocking connection attempts abusive. Enable = 1 or disable = 0', 'security'),
('isAdminLoginAttempt', 1, 'Enable blocking connection attempts abusive. Enable = 1 or disable = 0', 'security'),
('isAffiliateLoginAttempt', 1, 'Enable blocking connection attempts abusive. Enable = 1 or disable = 0', 'security'),
('maxUserLoginAttempts', 20, 'Maximum login attempts before blocking', 'security'),
('maxAffiliateLoginAttempts', 15, 'Maximum login attempts before blocking', 'security'),
('maxAdminLoginAttempts', 10, 'Maximum login attempts before blocking', 'security'),
('loginUserAttemptTime', 60, 'Time before a new connection attempt, in minutes!', 'security'),
('loginAffiliateAttemptTime', 60, 'Time before a new connection attempt, in minutes!', 'security'),
('loginAdminAttemptTime', 120, 'Time before a new connection attempt, in minutes!', 'security'),
('isUserSessionIpCheck', 0, 'Enable it to Protect against session hijacking. Disable it if use dynamic IPs', 'security'),
('isAffiliateSessionIpCheck', 1, 'Enable it to Protect against session hijacking. Disable it if use dynamic IPs', 'security'),
('isAdminSessionIpCheck', 1, 'Enable it to Protect against session hijacking. Disable it if use dynamic IPs', 'security'),
('avatarManualApproval', 0, '0 to disable or 1 to enable ', 'moderation'),
('bgProfileManualApproval', 0, 'Background Profile Manual Approval. 0 to disable or 1 to enable ', 'moderation'),
('noteManualApproval', 0, '0 to disable or 1 to enable ', 'moderation'),
('pictureManualApproval', 0, '0 to disable or 1 to enable ', 'moderation'),
('videoManualApproval', 0, '0 to disable or 1 to enable ', 'moderation'),
('nudityFilter', 0, '1 = enable | 0 = disable', 'moderation'),
('defaultVideo', @sDefaultVideoUrl, 'Video by default if no video is found', 'video'),
('autoplayVideo', 1, '1 = Autoplay is enabled, 0 = Autoplay is disabled', 'video'),
('sendReportMail', 1, 'Send the Report by eMail (1 = enable, 0 = disable)', 'security'),
('siteStatus', 'enable', 'enable or maintenance', 'general'),
('mailType', 'mail', '', 'email'),
('smtpHostName', 'mail.example.com', '', 'email'),
('smtpPassword', '123456', '', 'email'), -- placeholder; must be changed after installation
('smtpPort', 25, '', 'email'),
('watermarkTextImage', 'pH7CMS.com', 'Watermark text', 'image'),
('sizeWatermarkTextImage', 2, 'Between 0 to 5', 'image'),
('banWordReplace', '[removed]', '', 'security'),
('securityToken', 0, '0 to disable or 1 to enable the CSRF security token in the forms', 'security'),
('securityTokenLifetime', 720, 'Time in seconds to the CSRF security token. Default 720 seconds (12 mins)', 'security'),
('DDoS', 0, '0 to disabled or 1 to enabled DDoS attack protection', 'security'),
('isSiteValidated', 0, '0 = site not validated | 1 = site validated', 'security'),
('cleanMsg', 0, 'Delete messages older than X days. 0 = Disable', 'pruning'),
('cleanComment', 0, 'Delete comments older than X days. 0 = Disable', 'pruning'),
('cleanMessenger', 0, 'Delete IM messages older than X days. 0 = Disable', 'pruning'),
('ipApi', @sIpApiUrl, 'IP Api URL', 'api'),
('chatApi', @sChatApiUrl, 'Chat Api URL', 'api'),
('chatrouletteApi', @sChatrouletteApiUrl, 'Chatroulette Api URL', 'api'),
('googleApiKey', '', 'Google Maps API key https://developers.google.com/maps/documentation/javascript/get-api-key', 'api'),
('cronSecurityHash', 'change_this_secret_cron_word_by_yours', 'The secret word for the URL of the cron', 'automation'),
('userTimeout', 1, 'User inactivity timeout. The number of minutes that a member becomes inactive (offline)', 'automation'),
('socialMediaWidgets', 0, 'Enable the Social Media Widgets such as Like and Sharing buttons. 0 = Disable | 1 = Enable', 'general'),
('wysiwygEditorForum', 0, 'Enable or not the WYSIWYG. 0 = Disable | 1 = Enable', 'general'),
('disclaimer', 0, 'Enable a disclaimer to enter to the site. This is useful for sites with adult content. 0 = Disable | 1 = Enable', 'general'),
('cookieConsentBar', 0, 'Enable the cookie consent bar to prevent your users that your site uses cookies. 0 = Disable | 1 = Enable', 'general'),
('displayPoweredByLink', 1, 'Show or not the branding link in the footer.', 'general'),
('isSoftwareNewsFeed', 1, 'Enable the news feed. 0 = Disable | 1 = Enable', 'general');
-- Newsletter subscribers (not necessarily registered members).
CREATE TABLE IF NOT EXISTS ph7_subscribers (
  profileId      INT(10) UNSIGNED NOT NULL AUTO_INCREMENT,
  name           VARCHAR(191) NOT NULL,
  email          VARCHAR(120) NOT NULL,
  joinDate       DATETIME DEFAULT NULL,
  active         TINYINT(1) UNSIGNED NOT NULL DEFAULT 2, -- 1 = Active Account, 2 = Pending Account
  ip             VARCHAR(45) NOT NULL DEFAULT '127.0.0.1', -- VARCHAR(45) fits IPv6
  hashValidation VARCHAR(40) DEFAULT NULL,                 -- email-confirmation token
  affiliatedId   INT(10) UNSIGNED NOT NULL DEFAULT 0,      -- 0 = no referring affiliate
  PRIMARY KEY (profileId),
  UNIQUE KEY (email)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Header navigation entries; two-level nesting via parentMenu/grandParentMenu.
CREATE TABLE IF NOT EXISTS ph7_top_menus (
  menuId         SMALLINT(4) UNSIGNED NOT NULL AUTO_INCREMENT,
  vendorName     VARCHAR(40) NOT NULL,
  moduleName     VARCHAR(40) NOT NULL,
  controllerName VARCHAR(40) NOT NULL,
  actionName     VARCHAR(40) NOT NULL,
  vars           VARCHAR(60) DEFAULT NULL,            -- extra URL parameters
  parentMenu     SMALLINT(4) UNSIGNED DEFAULT NULL,
  grandParentMenu SMALLINT(4) UNSIGNED DEFAULT NULL,
  onlyForUsers   ENUM('0','1') NOT NULL DEFAULT '0',  -- '1' = shown to logged-in members only
  active         ENUM('0','1') NOT NULL DEFAULT '0',
  PRIMARY KEY (menuId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Footer navigation entries; mirrors ph7_top_menus minus the onlyForUsers flag.
CREATE TABLE IF NOT EXISTS ph7_bottom_menus (
  menuId         SMALLINT(4) UNSIGNED NOT NULL AUTO_INCREMENT,
  vendorName     VARCHAR(40) NOT NULL,
  moduleName     VARCHAR(40) NOT NULL,
  controllerName VARCHAR(40) NOT NULL,
  actionName     VARCHAR(40) NOT NULL,
  vars           VARCHAR(60) DEFAULT NULL,           -- extra URL parameters
  parentMenu     SMALLINT(4) UNSIGNED DEFAULT NULL,
  grandParentMenu SMALLINT(4) UNSIGNED DEFAULT NULL,
  active         ENUM('0','1') NOT NULL DEFAULT '0',
  PRIMARY KEY (menuId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- External CSS/JS assets injected into pages.
CREATE TABLE IF NOT EXISTS ph7_static_files (
  staticId SMALLINT(4) UNSIGNED NOT NULL AUTO_INCREMENT,
  file     VARCHAR(191) NOT NULL,
  fileType ENUM('css', 'js') NOT NULL,
  active   ENUM('1','0') DEFAULT '1', -- NOTE: enum order is '1','0' here (reversed vs other tables); kept as-is
  PRIMARY KEY (staticId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- FIX: explicit column list (was a bare INSERT ... VALUES, which breaks
-- silently if the column order ever changes). Seed row ships disabled ('0').
INSERT INTO ph7_static_files (staticId, file, fileType, active) VALUES (1, '//static.addtoany.com/menu/page.js', 'js', '0');
-- Free-form custom CSS/JS snippets edited from the admin panel.
-- The table has no key; the application maintains one row per codeType.
-- FIX: dropped the AUTO_INCREMENT=1 table option — this table has no
-- AUTO_INCREMENT column, so the option was meaningless and misleading.
CREATE TABLE IF NOT EXISTS ph7_custom_code (
  code text,
  codeType enum('css', 'js') NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-- FIX: explicit column list (was a bare INSERT ... VALUES).
-- The '\r\n' escapes are intentional: MySQL expands them to CR LF in the stored snippet.
INSERT INTO ph7_custom_code (code, codeType) VALUES
('/* Your custom CSS code here */\r\n', 'css'),
('/* Your custom JS code here */\r\n', 'js');
-- Countries blocked from accessing the site (ISO 3166-1 alpha-2 codes).
CREATE TABLE IF NOT EXISTS ph7_block_countries (
  countryId   TINYINT(3) UNSIGNED NOT NULL AUTO_INCREMENT,
  countryCode CHAR(2) NOT NULL,
  PRIMARY KEY (countryId),
  UNIQUE KEY (countryCode)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Countries selectable at member registration (ISO 3166-1 alpha-2 codes).
CREATE TABLE IF NOT EXISTS ph7_members_countries (
  countryId   TINYINT(3) UNSIGNED NOT NULL AUTO_INCREMENT,
  countryCode CHAR(2) NOT NULL,
  PRIMARY KEY (countryId),
  UNIQUE KEY (countryCode)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Seed the member-country whitelist (242 codes, same alphabetical order as
-- before, now grouped ten per line). Includes a few legacy/non-ISO codes the
-- application still recognizes (AN, FX, TP, UK).
INSERT INTO ph7_members_countries (countryCode) VALUES
('AD'), ('AE'), ('AF'), ('AG'), ('AI'), ('AL'), ('AM'), ('AN'), ('AO'), ('AQ'),
('AR'), ('AS'), ('AT'), ('AU'), ('AW'), ('AX'), ('AZ'), ('BA'), ('BB'), ('BD'),
('BE'), ('BF'), ('BG'), ('BH'), ('BI'), ('BJ'), ('BM'), ('BN'), ('BO'), ('BR'),
('BS'), ('BT'), ('BV'), ('BW'), ('BY'), ('BZ'), ('CA'), ('CC'), ('CD'), ('CF'),
('CG'), ('CH'), ('CI'), ('CK'), ('CL'), ('CM'), ('CN'), ('CO'), ('CR'), ('CU'),
('CV'), ('CX'), ('CY'), ('CZ'), ('DE'), ('DJ'), ('DK'), ('DM'), ('DO'), ('DZ'),
('EC'), ('EE'), ('EG'), ('EH'), ('ER'), ('ES'), ('ET'), ('FI'), ('FJ'), ('FK'),
('FM'), ('FO'), ('FR'), ('FX'), ('GA'), ('GD'), ('GE'), ('GF'), ('GH'), ('GI'),
('GL'), ('GM'), ('GN'), ('GP'), ('GQ'), ('GR'), ('GS'), ('GT'), ('GU'), ('GW'),
('GY'), ('HK'), ('HM'), ('HN'), ('HR'), ('HT'), ('HU'), ('ID'), ('IE'), ('IL'),
('IN'), ('IO'), ('IQ'), ('IR'), ('IS'), ('IT'), ('JM'), ('JO'), ('JP'), ('KE'),
('KG'), ('KH'), ('KI'), ('KM'), ('KN'), ('KP'), ('KR'), ('KW'), ('KY'), ('KZ'),
('LA'), ('LB'), ('LC'), ('LI'), ('LK'), ('LR'), ('LS'), ('LT'), ('LU'), ('LV'),
('LY'), ('MA'), ('MC'), ('MD'), ('ME'), ('MG'), ('MH'), ('MK'), ('ML'), ('MM'),
('MN'), ('MO'), ('MP'), ('MQ'), ('MR'), ('MS'), ('MT'), ('MU'), ('MV'), ('MW'),
('MX'), ('MY'), ('MZ'), ('NA'), ('NC'), ('NE'), ('NF'), ('NG'), ('NI'), ('NL'),
('NO'), ('NP'), ('NR'), ('NU'), ('NZ'), ('OM'), ('PA'), ('PE'), ('PF'), ('PG'),
('PH'), ('PK'), ('PL'), ('PM'), ('PN'), ('PR'), ('PT'), ('PW'), ('PY'), ('QA'),
('RE'), ('RO'), ('RS'), ('RU'), ('RW'), ('SA'), ('SB'), ('SC'), ('SD'), ('SE'),
('SG'), ('SH'), ('SI'), ('SJ'), ('SK'), ('SL'), ('SM'), ('SN'), ('SO'), ('SR'),
('ST'), ('SV'), ('SY'), ('SZ'), ('TC'), ('TD'), ('TF'), ('TG'), ('TH'), ('TJ'),
('TK'), ('TM'), ('TN'), ('TO'), ('TP'), ('TR'), ('TT'), ('TV'), ('TW'), ('TZ'),
('UA'), ('UG'), ('UK'), ('UM'), ('US'), ('UY'), ('UZ'), ('VA'), ('VC'), ('VE'),
('VG'), ('VI'), ('VN'), ('VU'), ('WF'), ('WS'), ('XK'), ('YE'), ('YT'), ('ZA'),
('ZM'), ('ZW');
-- Countries selectable at affiliate registration (ISO 3166-1 alpha-2 codes).
CREATE TABLE IF NOT EXISTS ph7_affiliates_countries (
  countryId   TINYINT(3) UNSIGNED NOT NULL AUTO_INCREMENT,
  countryCode CHAR(2) NOT NULL,
  PRIMARY KEY (countryId),
  UNIQUE KEY (countryCode)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 AUTO_INCREMENT=1;
-- Seed the affiliate-country whitelist (same 242 codes, same order as the
-- member list, grouped ten per line).
-- FIX: the final line of this statement carried a non-SQL extraction artifact
-- (' | the_stack') after the terminating semicolon, which made the script
-- unparseable; it has been removed.
INSERT INTO ph7_affiliates_countries (countryCode) VALUES
('AD'), ('AE'), ('AF'), ('AG'), ('AI'), ('AL'), ('AM'), ('AN'), ('AO'), ('AQ'),
('AR'), ('AS'), ('AT'), ('AU'), ('AW'), ('AX'), ('AZ'), ('BA'), ('BB'), ('BD'),
('BE'), ('BF'), ('BG'), ('BH'), ('BI'), ('BJ'), ('BM'), ('BN'), ('BO'), ('BR'),
('BS'), ('BT'), ('BV'), ('BW'), ('BY'), ('BZ'), ('CA'), ('CC'), ('CD'), ('CF'),
('CG'), ('CH'), ('CI'), ('CK'), ('CL'), ('CM'), ('CN'), ('CO'), ('CR'), ('CU'),
('CV'), ('CX'), ('CY'), ('CZ'), ('DE'), ('DJ'), ('DK'), ('DM'), ('DO'), ('DZ'),
('EC'), ('EE'), ('EG'), ('EH'), ('ER'), ('ES'), ('ET'), ('FI'), ('FJ'), ('FK'),
('FM'), ('FO'), ('FR'), ('FX'), ('GA'), ('GD'), ('GE'), ('GF'), ('GH'), ('GI'),
('GL'), ('GM'), ('GN'), ('GP'), ('GQ'), ('GR'), ('GS'), ('GT'), ('GU'), ('GW'),
('GY'), ('HK'), ('HM'), ('HN'), ('HR'), ('HT'), ('HU'), ('ID'), ('IE'), ('IL'),
('IN'), ('IO'), ('IQ'), ('IR'), ('IS'), ('IT'), ('JM'), ('JO'), ('JP'), ('KE'),
('KG'), ('KH'), ('KI'), ('KM'), ('KN'), ('KP'), ('KR'), ('KW'), ('KY'), ('KZ'),
('LA'), ('LB'), ('LC'), ('LI'), ('LK'), ('LR'), ('LS'), ('LT'), ('LU'), ('LV'),
('LY'), ('MA'), ('MC'), ('MD'), ('ME'), ('MG'), ('MH'), ('MK'), ('ML'), ('MM'),
('MN'), ('MO'), ('MP'), ('MQ'), ('MR'), ('MS'), ('MT'), ('MU'), ('MV'), ('MW'),
('MX'), ('MY'), ('MZ'), ('NA'), ('NC'), ('NE'), ('NF'), ('NG'), ('NI'), ('NL'),
('NO'), ('NP'), ('NR'), ('NU'), ('NZ'), ('OM'), ('PA'), ('PE'), ('PF'), ('PG'),
('PH'), ('PK'), ('PL'), ('PM'), ('PN'), ('PR'), ('PT'), ('PW'), ('PY'), ('QA'),
('RE'), ('RO'), ('RS'), ('RU'), ('RW'), ('SA'), ('SB'), ('SC'), ('SD'), ('SE'),
('SG'), ('SH'), ('SI'), ('SJ'), ('SK'), ('SL'), ('SM'), ('SN'), ('SO'), ('SR'),
('ST'), ('SV'), ('SY'), ('SZ'), ('TC'), ('TD'), ('TF'), ('TG'), ('TH'), ('TJ'),
('TK'), ('TM'), ('TN'), ('TO'), ('TP'), ('TR'), ('TT'), ('TV'), ('TW'), ('TZ'),
('UA'), ('UG'), ('UK'), ('UM'), ('US'), ('UY'), ('UZ'), ('VA'), ('VC'), ('VE'),
('VG'), ('VI'), ('VN'), ('VU'), ('WF'), ('WS'), ('XK'), ('YE'), ('YT'), ('ZA'),
('ZM'), ('ZW');
-- JSON_EXISTS
-- Regression tests: NULL input, scalar/array/object subjects, strict vs lax
-- path modes, PASSING parameters, ON ERROR behavior, and the PostgreSQL
-- RETURNING-clause extension. Statement text is part of the expected output,
-- so the queries themselves must not be reformatted.
SELECT JSON_EXISTS(NULL::jsonb, '$');
SELECT JSON_EXISTS(jsonb '[]', '$');
SELECT JSON_EXISTS(JSON_OBJECT(RETURNING jsonb), '$');
SELECT JSON_EXISTS(jsonb '1', '$');
SELECT JSON_EXISTS(jsonb 'null', '$');
SELECT JSON_EXISTS(jsonb '[]', '$');
SELECT JSON_EXISTS(jsonb '1', '$.a');
SELECT JSON_EXISTS(jsonb '1', 'strict $.a');
SELECT JSON_EXISTS(jsonb '1', 'strict $.a' ERROR ON ERROR);
SELECT JSON_EXISTS(jsonb 'null', '$.a');
SELECT JSON_EXISTS(jsonb '[]', '$.a');
SELECT JSON_EXISTS(jsonb '[1, "aaa", {"a": 1}]', 'strict $.a');
SELECT JSON_EXISTS(jsonb '[1, "aaa", {"a": 1}]', 'lax $.a');
SELECT JSON_EXISTS(jsonb '{}', '$.a');
SELECT JSON_EXISTS(jsonb '{"b": 1, "a": 2}', '$.a');
SELECT JSON_EXISTS(jsonb '1', '$.a.b');
SELECT JSON_EXISTS(jsonb '{"a": {"b": 1}}', '$.a.b');
SELECT JSON_EXISTS(jsonb '{"a": 1, "b": 2}', '$.a.b');
SELECT JSON_EXISTS(jsonb '{"a": 1, "b": 2}', '$.* ? (@ > $x)' PASSING 1 AS x);
SELECT JSON_EXISTS(jsonb '{"a": 1, "b": 2}', '$.* ? (@ > $x)' PASSING '1' AS x);
SELECT JSON_EXISTS(jsonb '{"a": 1, "b": 2}', '$.* ? (@ > $x && @ < $y)' PASSING 0 AS x, 2 AS y);
SELECT JSON_EXISTS(jsonb '{"a": 1, "b": 2}', '$.* ? (@ > $x && @ < $y)' PASSING 0 AS x, 1 AS y);
-- extension: boolean expressions
SELECT JSON_EXISTS(jsonb '1', '$ > 2');
SELECT JSON_EXISTS(jsonb '1', '$.a > 2' ERROR ON ERROR);
-- extension: RETURNING clause
SELECT JSON_EXISTS(jsonb '1', '$[0]' RETURNING bool);
SELECT JSON_EXISTS(jsonb '1', '$[1]' RETURNING bool);
SELECT JSON_EXISTS(jsonb '1', '$[0]' RETURNING int);
SELECT JSON_EXISTS(jsonb '1', '$[1]' RETURNING int);
SELECT JSON_EXISTS(jsonb '1', '$[0]' RETURNING text);
SELECT JSON_EXISTS(jsonb '1', '$[1]' RETURNING text);
SELECT JSON_EXISTS(jsonb '1', 'strict $[1]' RETURNING text FALSE ON ERROR);
SELECT JSON_EXISTS(jsonb '1', '$[0]' RETURNING jsonb);
SELECT JSON_EXISTS(jsonb '1', '$[0]' RETURNING float4);
-- JSON_VALUE
-- Regression tests: scalar extraction with RETURNING coercions, error/empty
-- handling (ERROR / NULL / DEFAULT ... ON EMPTY / ON ERROR), and NOT NULL
-- domain enforcement. Statement text is part of the expected output.
SELECT JSON_VALUE(NULL::jsonb, '$');
SELECT JSON_VALUE(jsonb 'null', '$');
SELECT JSON_VALUE(jsonb 'null', '$' RETURNING int);
SELECT JSON_VALUE(jsonb 'true', '$');
SELECT JSON_VALUE(jsonb 'true', '$' RETURNING bool);
SELECT JSON_VALUE(jsonb '123', '$');
SELECT JSON_VALUE(jsonb '123', '$' RETURNING int) + 234;
SELECT JSON_VALUE(jsonb '123', '$' RETURNING text);
/* jsonb bytea ??? */
SELECT JSON_VALUE(jsonb '123', '$' RETURNING bytea ERROR ON ERROR);
SELECT JSON_VALUE(jsonb '1.23', '$');
SELECT JSON_VALUE(jsonb '1.23', '$' RETURNING int);
SELECT JSON_VALUE(jsonb '"1.23"', '$' RETURNING numeric);
SELECT JSON_VALUE(jsonb '"1.23"', '$' RETURNING int ERROR ON ERROR);
SELECT JSON_VALUE(jsonb '"aaa"', '$');
SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING text);
SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING char(5));
SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING char(2));
SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING json);
SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING jsonb);
SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING json ERROR ON ERROR);
SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING jsonb ERROR ON ERROR);
SELECT JSON_VALUE(jsonb '"\"aaa\""', '$' RETURNING json);
SELECT JSON_VALUE(jsonb '"\"aaa\""', '$' RETURNING jsonb);
SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING int);
SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING int ERROR ON ERROR);
SELECT JSON_VALUE(jsonb '"aaa"', '$' RETURNING int DEFAULT 111 ON ERROR);
SELECT JSON_VALUE(jsonb '"123"', '$' RETURNING int) + 234;
SELECT JSON_VALUE(jsonb '"2017-02-20"', '$' RETURNING date) + 9;
-- Test NULL checks execution in domain types
CREATE DOMAIN sqljsonb_int_not_null AS int NOT NULL;
SELECT JSON_VALUE(jsonb '1', '$.a' RETURNING sqljsonb_int_not_null);
SELECT JSON_VALUE(jsonb '1', '$.a' RETURNING sqljsonb_int_not_null NULL ON ERROR);
SELECT JSON_VALUE(jsonb '1', '$.a' RETURNING sqljsonb_int_not_null DEFAULT NULL ON ERROR);
SELECT JSON_VALUE(jsonb '[]', '$');
SELECT JSON_VALUE(jsonb '[]', '$' ERROR ON ERROR);
SELECT JSON_VALUE(jsonb '{}', '$');
SELECT JSON_VALUE(jsonb '{}', '$' ERROR ON ERROR);
SELECT JSON_VALUE(jsonb '1', '$.a');
SELECT JSON_VALUE(jsonb '1', 'strict $.a' ERROR ON ERROR);
SELECT JSON_VALUE(jsonb '1', 'strict $.a' DEFAULT 'error' ON ERROR);
SELECT JSON_VALUE(jsonb '1', 'lax $.a' ERROR ON ERROR);
SELECT JSON_VALUE(jsonb '1', 'lax $.a' ERROR ON EMPTY ERROR ON ERROR);
SELECT JSON_VALUE(jsonb '1', 'strict $.a' DEFAULT 2 ON ERROR);
SELECT JSON_VALUE(jsonb '1', 'lax $.a' DEFAULT 2 ON ERROR);
SELECT JSON_VALUE(jsonb '1', 'lax $.a' DEFAULT '2' ON ERROR);
SELECT JSON_VALUE(jsonb '1', 'lax $.a' NULL ON EMPTY DEFAULT '2' ON ERROR);
SELECT JSON_VALUE(jsonb '1', 'lax $.a' DEFAULT '2' ON EMPTY DEFAULT '3' ON ERROR);
SELECT JSON_VALUE(jsonb '1', 'lax $.a' ERROR ON EMPTY DEFAULT '3' ON ERROR);
SELECT JSON_VALUE(jsonb '[1,2]', '$[*]' ERROR ON ERROR);
SELECT JSON_VALUE(jsonb '[1,2]', '$[*]' DEFAULT '0' ON ERROR);
SELECT JSON_VALUE(jsonb '[" "]', '$[*]' RETURNING int ERROR ON ERROR);
SELECT JSON_VALUE(jsonb '[" "]', '$[*]' RETURNING int DEFAULT 2 + 3 ON ERROR);
SELECT JSON_VALUE(jsonb '["1"]', '$[*]' RETURNING int DEFAULT 2 + 3 ON ERROR);
-- JSON_VALUE with PASSING parameters: the x = 0..2 sweep exercises the
-- empty-result (-1) and multiple-result-error (-2) defaults in one query.
SELECT
	x,
	JSON_VALUE(
		jsonb '{"a": 1, "b": 2}',
		'$.* ? (@ > $x)' PASSING x AS x
		RETURNING int
		DEFAULT -1 ON EMPTY
		DEFAULT -2 ON ERROR
	) y
FROM
	generate_series(0, 2) x;
SELECT JSON_VALUE(jsonb 'null', '$a' PASSING point ' (1, 2 )' AS a);
SELECT JSON_VALUE(jsonb 'null', '$a' PASSING point ' (1, 2 )' AS a RETURNING point);
-- Test timestamptz passing and output
SELECT JSON_VALUE(jsonb 'null', '$ts' PASSING timestamptz '2018-02-21 12:34:56 +10' AS ts);
SELECT JSON_VALUE(jsonb 'null', '$ts' PASSING timestamptz '2018-02-21 12:34:56 +10' AS ts RETURNING timestamptz);
SELECT JSON_VALUE(jsonb 'null', '$ts' PASSING timestamptz '2018-02-21 12:34:56 +10' AS ts RETURNING timestamp);
SELECT JSON_VALUE(jsonb 'null', '$ts' PASSING timestamptz '2018-02-21 12:34:56 +10' AS ts RETURNING json);
SELECT JSON_VALUE(jsonb 'null', '$ts' PASSING timestamptz '2018-02-21 12:34:56 +10' AS ts RETURNING jsonb);
-- JSON_QUERY
-- Regression tests for the wrapper clauses (WITHOUT / WITH CONDITIONAL /
-- WITH [UNCONDITIONAL] ARRAY WRAPPER) and the KEEP/OMIT QUOTES clauses,
-- including the rule that QUOTES cannot be combined with WITH WRAPPER.
SELECT
	JSON_QUERY(js, '$'),
	JSON_QUERY(js, '$' WITHOUT WRAPPER),
	JSON_QUERY(js, '$' WITH CONDITIONAL WRAPPER),
	JSON_QUERY(js, '$' WITH UNCONDITIONAL ARRAY WRAPPER),
	JSON_QUERY(js, '$' WITH ARRAY WRAPPER)
FROM
	(VALUES
		(jsonb 'null'),
		('12.3'),
		('true'),
		('"aaa"'),
		('[1, null, "2"]'),
		('{"a": 1, "b": [2]}')
	) foo(js);
SELECT
	JSON_QUERY(js, 'strict $[*]') AS "unspec",
	JSON_QUERY(js, 'strict $[*]' WITHOUT WRAPPER) AS "without",
	JSON_QUERY(js, 'strict $[*]' WITH CONDITIONAL WRAPPER) AS "with cond",
	JSON_QUERY(js, 'strict $[*]' WITH UNCONDITIONAL ARRAY WRAPPER) AS "with uncond",
	JSON_QUERY(js, 'strict $[*]' WITH ARRAY WRAPPER) AS "with"
FROM
	(VALUES
		(jsonb '1'),
		('[]'),
		('[null]'),
		('[12.3]'),
		('[true]'),
		('["aaa"]'),
		('[[1, 2, 3]]'),
		('[{"a": 1, "b": [2]}]'),
		('[1, "2", null, [3]]')
	) foo(js);
SELECT JSON_QUERY(jsonb '"aaa"', '$' RETURNING text);
SELECT JSON_QUERY(jsonb '"aaa"', '$' RETURNING text KEEP QUOTES);
SELECT JSON_QUERY(jsonb '"aaa"', '$' RETURNING text KEEP QUOTES ON SCALAR STRING);
SELECT JSON_QUERY(jsonb '"aaa"', '$' RETURNING text OMIT QUOTES);
SELECT JSON_QUERY(jsonb '"aaa"', '$' RETURNING text OMIT QUOTES ON SCALAR STRING);
SELECT JSON_QUERY(jsonb '"aaa"', '$' OMIT QUOTES ERROR ON ERROR);
SELECT JSON_QUERY(jsonb '"aaa"', '$' RETURNING json OMIT QUOTES ERROR ON ERROR);
SELECT JSON_QUERY(jsonb '"aaa"', '$' RETURNING bytea FORMAT JSON OMIT QUOTES ERROR ON ERROR);
-- QUOTES behavior should not be specified when WITH WRAPPER used:
-- Should fail
SELECT JSON_QUERY(jsonb '[1]', '$' WITH WRAPPER OMIT QUOTES);
SELECT JSON_QUERY(jsonb '[1]', '$' WITH WRAPPER KEEP QUOTES);
SELECT JSON_QUERY(jsonb '[1]', '$' WITH CONDITIONAL WRAPPER KEEP QUOTES);
SELECT JSON_QUERY(jsonb '[1]', '$' WITH CONDITIONAL WRAPPER OMIT QUOTES);
-- Should succeed
SELECT JSON_QUERY(jsonb '[1]', '$' WITHOUT WRAPPER OMIT QUOTES);
SELECT JSON_QUERY(jsonb '[1]', '$' WITHOUT WRAPPER KEEP QUOTES);
-- JSON_QUERY ON EMPTY / ON ERROR combinations and RETURNING-type coercions
-- (json, jsonb, text, char, bytea, with and without FORMAT JSON).
SELECT JSON_QUERY(jsonb '[]', '$[*]');
SELECT JSON_QUERY(jsonb '[]', '$[*]' NULL ON EMPTY);
SELECT JSON_QUERY(jsonb '[]', '$[*]' EMPTY ON EMPTY);
SELECT JSON_QUERY(jsonb '[]', '$[*]' EMPTY ARRAY ON EMPTY);
SELECT JSON_QUERY(jsonb '[]', '$[*]' EMPTY OBJECT ON EMPTY);
SELECT JSON_QUERY(jsonb '[]', '$[*]' ERROR ON EMPTY);
SELECT JSON_QUERY(jsonb '[]', '$[*]' DEFAULT '"empty"' ON EMPTY);
SELECT JSON_QUERY(jsonb '[]', '$[*]' ERROR ON EMPTY NULL ON ERROR);
SELECT JSON_QUERY(jsonb '[]', '$[*]' ERROR ON EMPTY EMPTY ARRAY ON ERROR);
SELECT JSON_QUERY(jsonb '[]', '$[*]' ERROR ON EMPTY EMPTY OBJECT ON ERROR);
SELECT JSON_QUERY(jsonb '[]', '$[*]' ERROR ON EMPTY ERROR ON ERROR);
SELECT JSON_QUERY(jsonb '[]', '$[*]' ERROR ON ERROR);
SELECT JSON_QUERY(jsonb '[1,2]', '$[*]' ERROR ON ERROR);
SELECT JSON_QUERY(jsonb '[1,2]', '$[*]' DEFAULT '"empty"' ON ERROR);
SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING json);
SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING json FORMAT JSON);
SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING jsonb);
SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING jsonb FORMAT JSON);
SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING text);
SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING char(10));
SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING char(3));
SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING text FORMAT JSON);
SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING bytea);
SELECT JSON_QUERY(jsonb '[1,2]', '$' RETURNING bytea FORMAT JSON);
SELECT JSON_QUERY(jsonb '[1,2]', '$[*]' RETURNING bytea EMPTY OBJECT ON ERROR);
SELECT JSON_QUERY(jsonb '[1,2]', '$[*]' RETURNING bytea FORMAT JSON EMPTY OBJECT ON ERROR);
SELECT JSON_QUERY(jsonb '[1,2]', '$[*]' RETURNING json EMPTY OBJECT ON ERROR);
SELECT JSON_QUERY(jsonb '[1,2]', '$[*]' RETURNING jsonb EMPTY OBJECT ON ERROR);
-- Cross product of x/y bounds exercises empty, single, and multi-item results
-- under the conditional wrapper.
SELECT
	x, y,
	JSON_QUERY(
		jsonb '[1,2,3,4,5,null]',
		'$[*] ? (@ >= $x && @ <= $y)'
		PASSING x AS x, y AS y
		WITH CONDITIONAL WRAPPER
		EMPTY ARRAY ON EMPTY
	) list
FROM
	generate_series(0, 4) x,
	generate_series(0, 4) y;
-- Extension: record types returning
CREATE TYPE sqljsonb_rec AS (a int, t text, js json, jb jsonb, jsa json[]);
CREATE TYPE sqljsonb_reca AS (reca sqljsonb_rec[]);
SELECT JSON_QUERY(jsonb '[{"a": 1, "b": "foo", "t": "aaa", "js": [1, "2", {}], "jb": {"x": [1, "2", {}]}}, {"a": 2}]', '$[0]' RETURNING sqljsonb_rec);
SELECT * FROM unnest((JSON_QUERY(jsonb '{"jsa": [{"a": 1, "b": ["foo"]}, {"a": 2, "c": {}}, 123]}', '$' RETURNING sqljsonb_rec)).jsa);
SELECT * FROM unnest((JSON_QUERY(jsonb '{"reca": [{"a": 1, "t": ["foo", []]}, {"a": 2, "jb": [{}, true]}]}', '$' RETURNING sqljsonb_reca)).reca);
-- Extension: array types returning
SELECT JSON_QUERY(jsonb '[1,2,null,"3"]', '$[*]' RETURNING int[] WITH WRAPPER);
SELECT * FROM unnest(JSON_QUERY(jsonb '[{"a": 1, "t": ["foo", []]}, {"a": 2, "jb": [{}, true]}]', '$' RETURNING sqljsonb_rec[]));
-- Extension: domain types returning
-- (sqljsonb_int_not_null is the NOT NULL domain created in the JSON_VALUE section)
SELECT JSON_QUERY(jsonb '{"a": 1}', '$.a' RETURNING sqljsonb_int_not_null);
SELECT JSON_QUERY(jsonb '{"a": 1}', '$.b' RETURNING sqljsonb_int_not_null);
-- Test timestamptz passing and output
SELECT JSON_QUERY(jsonb 'null', '$ts' PASSING timestamptz '2018-02-21 12:34:56 +10' AS ts);
SELECT JSON_QUERY(jsonb 'null', '$ts' PASSING timestamptz '2018-02-21 12:34:56 +10' AS ts RETURNING json);
SELECT JSON_QUERY(jsonb 'null', '$ts' PASSING timestamptz '2018-02-21 12:34:56 +10' AS ts RETURNING jsonb);
-- Test constraints
-- SQL/JSON functions inside column DEFAULTs and CHECK constraints: the \d and
-- catalog queries verify the expressions deparse correctly, then the INSERTs
-- probe each constraint's pass/fail boundary.
CREATE TABLE test_jsonb_constraints (
	js text,
	i int,
	x jsonb DEFAULT JSON_QUERY(jsonb '[1,2]', '$[*]' WITH WRAPPER)
	CONSTRAINT test_jsonb_constraint1
		CHECK (js IS JSON)
	CONSTRAINT test_jsonb_constraint2
		CHECK (JSON_EXISTS(js::jsonb, '$.a' PASSING i + 5 AS int, i::text AS txt, array[1,2,3] as arr))
	CONSTRAINT test_jsonb_constraint3
		CHECK (JSON_VALUE(js::jsonb, '$.a' RETURNING int DEFAULT ('12' || i)::int ON EMPTY ERROR ON ERROR) > i)
	CONSTRAINT test_jsonb_constraint4
		CHECK (JSON_QUERY(js::jsonb, '$.a' WITH CONDITIONAL WRAPPER EMPTY OBJECT ON ERROR) < jsonb '[10]')
	CONSTRAINT test_jsonb_constraint5
		CHECK (JSON_QUERY(js::jsonb, '$.a' RETURNING char(5) OMIT QUOTES EMPTY ARRAY ON EMPTY) > 'a' COLLATE "C")
	CONSTRAINT test_jsonb_constraint6
		CHECK (JSON_EXISTS(js::jsonb, 'strict $.a' RETURNING int TRUE ON ERROR) < 2)
);
\d test_jsonb_constraints
SELECT check_clause
FROM information_schema.check_constraints
WHERE constraint_name LIKE 'test_jsonb_constraint%'
ORDER BY 1;
SELECT pg_get_expr(adbin, adrelid)
FROM pg_attrdef
WHERE adrelid = 'test_jsonb_constraints'::regclass
ORDER BY 1;
INSERT INTO test_jsonb_constraints VALUES ('', 1);
INSERT INTO test_jsonb_constraints VALUES ('1', 1);
INSERT INTO test_jsonb_constraints VALUES ('[]');
INSERT INTO test_jsonb_constraints VALUES ('{"b": 1}', 1);
INSERT INTO test_jsonb_constraints VALUES ('{"a": 1}', 1);
INSERT INTO test_jsonb_constraints VALUES ('{"a": 7}', 1);
INSERT INTO test_jsonb_constraints VALUES ('{"a": 10}', 1);
DROP TABLE test_jsonb_constraints;
-- Test mutability of query functions
-- Index expressions must be immutable; paths using .datetime() without an
-- explicit format (or whose comparison depends on the session time zone) are
-- mutable and must be rejected, while fully-determined datetime comparisons
-- are allowed.
CREATE TABLE test_jsonb_mutability(js jsonb);
CREATE INDEX ON test_jsonb_mutability (JSON_QUERY(js, '$'));
CREATE INDEX ON test_jsonb_mutability (JSON_QUERY(js, '$.a[0]'));
CREATE INDEX ON test_jsonb_mutability (JSON_QUERY(js, '$.datetime()'));
CREATE INDEX ON test_jsonb_mutability (JSON_QUERY(js, '$.a ? (@ < $.datetime())'));
CREATE INDEX ON test_jsonb_mutability (JSON_QUERY(js, '$.a ? (@.datetime() < $.datetime())'));
CREATE INDEX ON test_jsonb_mutability (JSON_QUERY(js, '$.a ? (@.datetime() < $.datetime("HH:MI TZH"))'));
CREATE INDEX ON test_jsonb_mutability (JSON_QUERY(js, '$.a ? (@.datetime("HH:MI TZH") < $.datetime("HH:MI TZH"))'));
CREATE INDEX ON test_jsonb_mutability (JSON_QUERY(js, '$.a ? (@.datetime("HH:MI") < $.datetime("YY-MM-DD HH:MI"))'));
CREATE INDEX ON test_jsonb_mutability (JSON_QUERY(js, '$.a ? (@.datetime("HH:MI TZH") < $.datetime("YY-MM-DD HH:MI"))'));
CREATE INDEX ON test_jsonb_mutability (JSON_QUERY(js, '$.datetime("HH:MI TZH") < $x' PASSING '12:34'::timetz AS x));
CREATE INDEX ON test_jsonb_mutability (JSON_QUERY(js, '$.datetime("HH:MI TZH") < $y' PASSING '12:34'::timetz AS x));
CREATE INDEX ON test_jsonb_mutability (JSON_QUERY(js, '$.datetime() < $x' PASSING '12:34'::timetz AS x));
CREATE INDEX ON test_jsonb_mutability (JSON_QUERY(js, '$.datetime() < $x' PASSING '1234'::int AS x));
CREATE INDEX ON test_jsonb_mutability (JSON_QUERY(js, '$.datetime() ? (@ == $x)' PASSING '12:34'::time AS x));
CREATE INDEX ON test_jsonb_mutability (JSON_QUERY(js, '$.datetime("YY-MM-DD") ? (@ == $x)' PASSING '2020-07-14'::date AS x));
CREATE INDEX ON test_jsonb_mutability (JSON_QUERY(js, '$[1, $.a ? (@.datetime() == $x)]' PASSING '12:34'::time AS x));
CREATE INDEX ON test_jsonb_mutability (JSON_QUERY(js, '$[1, 0 to $.a ? (@.datetime() == $x)]' PASSING '12:34'::time AS x));
CREATE INDEX ON test_jsonb_mutability (JSON_QUERY(js, '$[1, $.a ? (@.datetime("HH:MI") == $x)]' PASSING '12:34'::time AS x));
DROP TABLE test_jsonb_mutability;
-- JSON_TABLE
-- Should fail (JSON_TABLE can be used only in FROM clause)
SELECT JSON_TABLE('[]', '$');
-- Should fail (no columns)
SELECT * FROM JSON_TABLE(NULL, '$' COLUMNS ());
SELECT * FROM JSON_TABLE (NULL::jsonb, '$' COLUMNS (v1 timestamp)) AS f (v1, v2);
-- NULL => empty table
SELECT * FROM JSON_TABLE(NULL::jsonb, '$' COLUMNS (foo int)) bar;
--
SELECT * FROM JSON_TABLE(jsonb '123', '$'
	COLUMNS (item int PATH '$', foo int)) bar;
-- JSON_TABLE: basic functionality
CREATE DOMAIN jsonb_test_domain AS text CHECK (value <> 'foo');
-- One wide COLUMNS list exercising every column kind: FOR ORDINALITY,
-- typed PATH columns, FORMAT JSON, OMIT QUOTES, implicit paths, EXISTS
-- columns with ON ERROR variants, wrappers, and array-typed columns.
SELECT *
FROM
	(VALUES
		('1'),
		('[]'),
		('{}'),
		('[1, 1.23, "2", "aaaaaaa", "foo", null, false, true, {"aaa": 123}, "[1,2]", "\"str\""]')
	) vals(js)
	LEFT OUTER JOIN
-- JSON_TABLE is implicitly lateral
	JSON_TABLE(
		vals.js::jsonb, 'lax $[*]'
		COLUMNS (
			id FOR ORDINALITY,
			id2 FOR ORDINALITY, -- allowed additional ordinality columns
			"int" int PATH '$',
			"text" text PATH '$',
			"char(4)" char(4) PATH '$',
			"bool" bool PATH '$',
			"numeric" numeric PATH '$',
			"domain" jsonb_test_domain PATH '$',
			js json PATH '$',
			jb jsonb PATH '$',
			jst text    FORMAT JSON  PATH '$',
			jsc char(4) FORMAT JSON  PATH '$',
			jsv varchar(4) FORMAT JSON  PATH '$',
			jsb jsonb FORMAT JSON PATH '$',
			jsbq jsonb FORMAT JSON PATH '$' OMIT QUOTES,
			aaa int, -- implicit path '$."aaa"',
			aaa1 int PATH '$.aaa',
			exists1 bool EXISTS PATH '$.aaa',
			exists2 int EXISTS PATH '$.aaa',
			exists3 int EXISTS PATH 'strict $.aaa' UNKNOWN ON ERROR,
			exists4 text EXISTS PATH 'strict $.aaa' FALSE ON ERROR,
			js2 json PATH '$',
			jsb2w jsonb PATH '$' WITH WRAPPER,
			jsb2q jsonb PATH '$' OMIT QUOTES,
			ia int[] PATH '$',
			ta text[] PATH '$',
			jba jsonb[] PATH '$'
		)
	) jt
	ON true;
-- JSON_TABLE: Test backward parsing
-- Wrap a JSON_TABLE using every column flavor (including nested paths with
-- quoted path names) in a view, then deparse it with \sv and EXPLAIN VERBOSE
-- to verify that ruleutils reconstructs the original syntax.
CREATE VIEW jsonb_table_view AS
SELECT * FROM
	JSON_TABLE(
		jsonb 'null', 'lax $[*]' PASSING 1 + 2 AS a, json '"foo"' AS "b c"
		COLUMNS (
			id FOR ORDINALITY,
			id2 FOR ORDINALITY, -- allowed additional ordinality columns
			"int" int PATH '$',
			"text" text PATH '$',
			"char(4)" char(4) PATH '$',
			"bool" bool PATH '$',
			"numeric" numeric PATH '$',
			"domain" jsonb_test_domain PATH '$',
			js json PATH '$',
			jb jsonb PATH '$',
			jst text FORMAT JSON PATH '$',
			jsc char(4) FORMAT JSON PATH '$',
			jsv varchar(4) FORMAT JSON PATH '$',
			jsb jsonb FORMAT JSON PATH '$',
			jsbq jsonb FORMAT JSON PATH '$' OMIT QUOTES,
			aaa int, -- implicit path '$."aaa"',
			aaa1 int PATH '$.aaa',
			exists1 bool EXISTS PATH '$.aaa',
			exists2 int EXISTS PATH '$.aaa' TRUE ON ERROR,
			exists3 text EXISTS PATH 'strict $.aaa' UNKNOWN ON ERROR,
			js2 json PATH '$',
			jsb2w jsonb PATH '$' WITH WRAPPER,
			jsb2q jsonb PATH '$' OMIT QUOTES,
			ia int[] PATH '$',
			ta text[] PATH '$',
			jba jsonb[] PATH '$',
			NESTED PATH '$[1]' AS p1 COLUMNS (
				a1 int,
				NESTED PATH '$[*]' AS "p1 1" COLUMNS (
					a11 text
				),
				b1 text
			),
			NESTED PATH '$[2]' AS p2 COLUMNS (
				NESTED PATH '$[*]' AS "p2:1" COLUMNS (
					a21 text
				),
				NESTED PATH '$[*]' AS p22 COLUMNS (
					a22 text
				)
			)
		)
	);
-- show the deparsed view definition (psql meta-command)
\sv jsonb_table_view
EXPLAIN (COSTS OFF, VERBOSE) SELECT * FROM jsonb_table_view;
DROP VIEW jsonb_table_view;
DROP DOMAIN jsonb_test_domain;
-- JSON_TABLE: ON EMPTY/ON ERROR behavior
-- '"err"' cannot be coerced to int: the default behavior, a table-level
-- ERROR ON ERROR, and a column-level ERROR ON ERROR are each exercised.
SELECT *
FROM
	(VALUES ('1'), ('"err"')) vals(js),
	JSON_TABLE(vals.js::jsonb, '$' COLUMNS (a int PATH '$')) jt;
SELECT *
FROM
	(VALUES ('1'), ('"err"')) vals(js)
	LEFT OUTER JOIN
	JSON_TABLE(vals.js::jsonb, '$' COLUMNS (a int PATH '$') ERROR ON ERROR) jt
	ON true;
SELECT *
FROM
	(VALUES ('1'), ('"err"')) vals(js)
	LEFT OUTER JOIN
	JSON_TABLE(vals.js::jsonb, '$' COLUMNS (a int PATH '$' ERROR ON ERROR)) jt
	ON true;
-- ON EMPTY vs ON ERROR interaction for lax/strict paths on a scalar input
SELECT * FROM JSON_TABLE(jsonb '1', '$' COLUMNS (a int PATH '$.a' ERROR ON EMPTY)) jt;
SELECT * FROM JSON_TABLE(jsonb '1', '$' COLUMNS (a int PATH 'strict $.a' ERROR ON EMPTY) ERROR ON ERROR) jt;
SELECT * FROM JSON_TABLE(jsonb '1', '$' COLUMNS (a int PATH 'lax $.a' ERROR ON EMPTY) ERROR ON ERROR) jt;
-- DEFAULT ... ON EMPTY / ON ERROR: which default applies depends on whether
-- the path yields nothing (empty) or evaluation/coercion fails (error)
SELECT * FROM JSON_TABLE(jsonb '"a"', '$' COLUMNS (a int PATH '$' DEFAULT 1 ON EMPTY DEFAULT 2 ON ERROR)) jt;
SELECT * FROM JSON_TABLE(jsonb '"a"', '$' COLUMNS (a int PATH 'strict $.a' DEFAULT 1 ON EMPTY DEFAULT 2 ON ERROR)) jt;
SELECT * FROM JSON_TABLE(jsonb '"a"', '$' COLUMNS (a int PATH 'lax $.a' DEFAULT 1 ON EMPTY DEFAULT 2 ON ERROR)) jt;
-- JSON_TABLE: EXISTS PATH types
-- An EXISTS column produces a boolean result; these probe which target types
-- that boolean can be coerced to (ints, float, char, json, jsonb).
SELECT * FROM JSON_TABLE(jsonb '"a"', '$' COLUMNS (a int4 EXISTS PATH '$.a'));
SELECT * FROM JSON_TABLE(jsonb '"a"', '$' COLUMNS (a int2 EXISTS PATH '$.a'));
SELECT * FROM JSON_TABLE(jsonb '"a"', '$' COLUMNS (a int8 EXISTS PATH '$.a'));
SELECT * FROM JSON_TABLE(jsonb '"a"', '$' COLUMNS (a float4 EXISTS PATH '$.a'));
SELECT * FROM JSON_TABLE(jsonb '"a"', '$' COLUMNS (a char(3) EXISTS PATH '$.a'));
SELECT * FROM JSON_TABLE(jsonb '"a"', '$' COLUMNS (a json EXISTS PATH '$.a'));
SELECT * FROM JSON_TABLE(jsonb '"a"', '$' COLUMNS (a jsonb EXISTS PATH '$.a'));
-- JSON_TABLE: nested paths and plans
-- Should fail (JSON_TABLE columns must contain explicit AS path
-- specifications if explicit PLAN clause is used)
SELECT * FROM JSON_TABLE(
	jsonb '[]', '$' -- AS <path name> required here
	COLUMNS (
		foo int PATH '$'
	)
	PLAN DEFAULT (UNION)
) jt;
SELECT * FROM JSON_TABLE(
	jsonb '[]', '$' AS path1
	COLUMNS (
		NESTED PATH '$' COLUMNS ( -- AS <path name> required here
			foo int PATH '$'
		)
	)
	PLAN DEFAULT (UNION)
) jt;
-- Should fail (column names must be distinct)
-- The collisions below are between path names and column names, at the same
-- level and across nesting levels.
SELECT * FROM JSON_TABLE(
	jsonb '[]', '$' AS a
	COLUMNS (
		a int
	)
) jt;
SELECT * FROM JSON_TABLE(
	jsonb '[]', '$' AS a
	COLUMNS (
		b int,
		NESTED PATH '$' AS a
		COLUMNS (
			c int
		)
	)
) jt;
SELECT * FROM JSON_TABLE(
	jsonb '[]', '$'
	COLUMNS (
		b int,
		NESTED PATH '$' AS b
		COLUMNS (
			c int
		)
	)
) jt;
SELECT * FROM JSON_TABLE(
	jsonb '[]', '$'
	COLUMNS (
		NESTED PATH '$' AS a
		COLUMNS (
			b int
		),
		NESTED PATH '$'
		COLUMNS (
			NESTED PATH '$' AS a
			COLUMNS (
				c int
			)
		)
	)
) jt;
-- JSON_TABLE: plan validation
-- All statements below share the same three-level path tree
-- (p0 -> p1 (p11, p12), p2 (p21)); only the PLAN clause varies.  Each PLAN
-- must reference the root first and cover every sibling exactly once with
-- UNION/CROSS, and every child via OUTER/INNER; malformed plans are expected
-- to be rejected (see the expected-output file for the exact errors).
SELECT * FROM JSON_TABLE(
	jsonb 'null', '$[*]' AS p0
	COLUMNS (
		NESTED PATH '$' AS p1 COLUMNS (
			NESTED PATH '$' AS p11 COLUMNS ( foo int ),
			NESTED PATH '$' AS p12 COLUMNS ( bar int )
		),
		NESTED PATH '$' AS p2 COLUMNS (
			NESTED PATH '$' AS p21 COLUMNS ( baz int )
		)
	)
	PLAN (p1)
) jt;
SELECT * FROM JSON_TABLE(
	jsonb 'null', '$[*]' AS p0
	COLUMNS (
		NESTED PATH '$' AS p1 COLUMNS (
			NESTED PATH '$' AS p11 COLUMNS ( foo int ),
			NESTED PATH '$' AS p12 COLUMNS ( bar int )
		),
		NESTED PATH '$' AS p2 COLUMNS (
			NESTED PATH '$' AS p21 COLUMNS ( baz int )
		)
	)
	PLAN (p0)
) jt;
-- p3 does not exist in the path tree
SELECT * FROM JSON_TABLE(
	jsonb 'null', '$[*]' AS p0
	COLUMNS (
		NESTED PATH '$' AS p1 COLUMNS (
			NESTED PATH '$' AS p11 COLUMNS ( foo int ),
			NESTED PATH '$' AS p12 COLUMNS ( bar int )
		),
		NESTED PATH '$' AS p2 COLUMNS (
			NESTED PATH '$' AS p21 COLUMNS ( baz int )
		)
	)
	PLAN (p0 OUTER p3)
) jt;
SELECT * FROM JSON_TABLE(
	jsonb 'null', '$[*]' AS p0
	COLUMNS (
		NESTED PATH '$' AS p1 COLUMNS (
			NESTED PATH '$' AS p11 COLUMNS ( foo int ),
			NESTED PATH '$' AS p12 COLUMNS ( bar int )
		),
		NESTED PATH '$' AS p2 COLUMNS (
			NESTED PATH '$' AS p21 COLUMNS ( baz int )
		)
	)
	PLAN (p0 UNION p1 UNION p11)
) jt;
-- p13 does not exist in the path tree
SELECT * FROM JSON_TABLE(
	jsonb 'null', '$[*]' AS p0
	COLUMNS (
		NESTED PATH '$' AS p1 COLUMNS (
			NESTED PATH '$' AS p11 COLUMNS ( foo int ),
			NESTED PATH '$' AS p12 COLUMNS ( bar int )
		),
		NESTED PATH '$' AS p2 COLUMNS (
			NESTED PATH '$' AS p21 COLUMNS ( baz int )
		)
	)
	PLAN (p0 OUTER (p1 CROSS p13))
) jt;
SELECT * FROM JSON_TABLE(
	jsonb 'null', '$[*]' AS p0
	COLUMNS (
		NESTED PATH '$' AS p1 COLUMNS (
			NESTED PATH '$' AS p11 COLUMNS ( foo int ),
			NESTED PATH '$' AS p12 COLUMNS ( bar int )
		),
		NESTED PATH '$' AS p2 COLUMNS (
			NESTED PATH '$' AS p21 COLUMNS ( baz int )
		)
	)
	PLAN (p0 OUTER (p1 CROSS p2))
) jt;
SELECT * FROM JSON_TABLE(
	jsonb 'null', '$[*]' AS p0
	COLUMNS (
		NESTED PATH '$' AS p1 COLUMNS (
			NESTED PATH '$' AS p11 COLUMNS ( foo int ),
			NESTED PATH '$' AS p12 COLUMNS ( bar int )
		),
		NESTED PATH '$' AS p2 COLUMNS (
			NESTED PATH '$' AS p21 COLUMNS ( baz int )
		)
	)
	PLAN (p0 OUTER ((p1 UNION p11) CROSS p2))
) jt;
SELECT * FROM JSON_TABLE(
	jsonb 'null', '$[*]' AS p0
	COLUMNS (
		NESTED PATH '$' AS p1 COLUMNS (
			NESTED PATH '$' AS p11 COLUMNS ( foo int ),
			NESTED PATH '$' AS p12 COLUMNS ( bar int )
		),
		NESTED PATH '$' AS p2 COLUMNS (
			NESTED PATH '$' AS p21 COLUMNS ( baz int )
		)
	)
	PLAN (p0 OUTER ((p1 INNER p11) CROSS p2))
) jt;
SELECT * FROM JSON_TABLE(
	jsonb 'null', '$[*]' AS p0
	COLUMNS (
		NESTED PATH '$' AS p1 COLUMNS (
			NESTED PATH '$' AS p11 COLUMNS ( foo int ),
			NESTED PATH '$' AS p12 COLUMNS ( bar int )
		),
		NESTED PATH '$' AS p2 COLUMNS (
			NESTED PATH '$' AS p21 COLUMNS ( baz int )
		)
	)
	PLAN (p0 OUTER ((p1 INNER (p12 CROSS p11)) CROSS p2))
) jt;
-- fully specified plan covering every path in the tree
SELECT * FROM JSON_TABLE(
	jsonb 'null', 'strict $[*]' AS p0
	COLUMNS (
		NESTED PATH '$' AS p1 COLUMNS (
			NESTED PATH '$' AS p11 COLUMNS ( foo int ),
			NESTED PATH '$' AS p12 COLUMNS ( bar int )
		),
		NESTED PATH '$' AS p2 COLUMNS (
			NESTED PATH '$' AS p21 COLUMNS ( baz int )
		)
	)
	PLAN (p0 OUTER ((p1 INNER (p12 CROSS p11)) CROSS (p2 INNER p21)))
) jt;
SELECT * FROM JSON_TABLE(
	jsonb 'null', 'strict $[*]' -- without root path name
	COLUMNS (
		NESTED PATH '$' AS p1 COLUMNS (
			NESTED PATH '$' AS p11 COLUMNS ( foo int ),
			NESTED PATH '$' AS p12 COLUMNS ( bar int )
		),
		NESTED PATH '$' AS p2 COLUMNS (
			NESTED PATH '$' AS p21 COLUMNS ( baz int )
		)
	)
	PLAN ((p1 INNER (p12 CROSS p11)) CROSS (p2 INNER p21))
) jt;
-- JSON_TABLE: plan execution
-- Fixture for the plan-execution queries below: four objects with varying
-- "b"/"c" array contents (empty, populated, missing, non-array) so that
-- outer/inner and union/cross plan shapes produce visibly different rows.
CREATE TEMP TABLE jsonb_table_test (js jsonb);
INSERT INTO jsonb_table_test
VALUES (
	'[
		{"a":  1,  "b": [], "c": []},
		{"a":  2,  "b": [1, 2, 3], "c": [10, null, 20]},
		{"a":  3,  "b": [1, 2], "c": []},
		{"x": "4", "b": [1, 2], "c": 123}
	 ]'
);
-- unspecified plan (outer, union)
-- The same query is repeated with every plan spelling (unspecified, DEFAULT,
-- and explicit) so matching plans can be checked for identical output.
select
	jt.*
from
	jsonb_table_test jtt,
	json_table (
		jtt.js,'strict $[*]' as p
		columns (
			n for ordinality,
			a int path 'lax $.a' default -1 on empty,
			nested path 'strict $.b[*]' as pb columns ( b int path '$' ),
			nested path 'strict $.c[*]' as pc columns ( c int path '$' )
		)
	) jt;
-- default plan (outer, union)
select
	jt.*
from
	jsonb_table_test jtt,
	json_table (
		jtt.js,'strict $[*]' as p
		columns (
			n for ordinality,
			a int path 'lax $.a' default -1 on empty,
			nested path 'strict $.b[*]' as pb columns ( b int path '$' ),
			nested path 'strict $.c[*]' as pc columns ( c int path '$' )
		)
		plan default (outer, union)
	) jt;
-- specific plan (p outer (pb union pc))
select
	jt.*
from
	jsonb_table_test jtt,
	json_table (
		jtt.js,'strict $[*]' as p
		columns (
			n for ordinality,
			a int path 'lax $.a' default -1 on empty,
			nested path 'strict $.b[*]' as pb columns ( b int path '$' ),
			nested path 'strict $.c[*]' as pc columns ( c int path '$' )
		)
		plan (p outer (pb union pc))
	) jt;
-- specific plan (p outer (pc union pb))
-- sibling order reversed: pc rows should come before pb rows per parent
select
	jt.*
from
	jsonb_table_test jtt,
	json_table (
		jtt.js,'strict $[*]' as p
		columns (
			n for ordinality,
			a int path 'lax $.a' default -1 on empty,
			nested path 'strict $.b[*]' as pb columns ( b int path '$' ),
			nested path 'strict $.c[*]' as pc columns ( c int path '$' )
		)
		plan (p outer (pc union pb))
	) jt;
-- default plan (inner, union)
select
	jt.*
from
	jsonb_table_test jtt,
	json_table (
		jtt.js,'strict $[*]' as p
		columns (
			n for ordinality,
			a int path 'lax $.a' default -1 on empty,
			nested path 'strict $.b[*]' as pb columns ( b int path '$' ),
			nested path 'strict $.c[*]' as pc columns ( c int path '$' )
		)
		plan default (inner)
	) jt;
-- specific plan (p inner (pb union pc))
select
	jt.*
from
	jsonb_table_test jtt,
	json_table (
		jtt.js,'strict $[*]' as p
		columns (
			n for ordinality,
			a int path 'lax $.a' default -1 on empty,
			nested path 'strict $.b[*]' as pb columns ( b int path '$' ),
			nested path 'strict $.c[*]' as pc columns ( c int path '$' )
		)
		plan (p inner (pb union pc))
	) jt;
-- default plan (inner, cross)
select
	jt.*
from
	jsonb_table_test jtt,
	json_table (
		jtt.js,'strict $[*]' as p
		columns (
			n for ordinality,
			a int path 'lax $.a' default -1 on empty,
			nested path 'strict $.b[*]' as pb columns ( b int path '$' ),
			nested path 'strict $.c[*]' as pc columns ( c int path '$' )
		)
		plan default (cross, inner)
	) jt;
-- specific plan (p inner (pb cross pc))
select
	jt.*
from
	jsonb_table_test jtt,
	json_table (
		jtt.js,'strict $[*]' as p
		columns (
			n for ordinality,
			a int path 'lax $.a' default -1 on empty,
			nested path 'strict $.b[*]' as pb columns ( b int path '$' ),
			nested path 'strict $.c[*]' as pc columns ( c int path '$' )
		)
		plan (p inner (pb cross pc))
	) jt;
-- default plan (outer, cross)
select
	jt.*
from
	jsonb_table_test jtt,
	json_table (
		jtt.js,'strict $[*]' as p
		columns (
			n for ordinality,
			a int path 'lax $.a' default -1 on empty,
			nested path 'strict $.b[*]' as pb columns ( b int path '$' ),
			nested path 'strict $.c[*]' as pc columns ( c int path '$' )
		)
		plan default (outer, cross)
	) jt;
-- specific plan (p outer (pb cross pc))
select
	jt.*
from
	jsonb_table_test jtt,
	json_table (
		jtt.js,'strict $[*]' as p
		columns (
			n for ordinality,
			a int path 'lax $.a' default -1 on empty,
			nested path 'strict $.b[*]' as pb columns ( b int path '$' ),
			nested path 'strict $.c[*]' as pc columns ( c int path '$' )
		)
		plan (p outer (pb cross pc))
	) jt;
-- doubly-nested paths with a mixed explicit plan:
-- pb joined inner to pb1, pc joined outer to pc1
select
	jt.*, b1 + 100 as b
from
	json_table (jsonb
		'[
			{"a":  1,  "b": [[1, 10], [2], [3, 30, 300]], "c": [1, null, 2]},
			{"a":  2,  "b": [10, 20], "c": [1, null, 2]},
			{"x": "3", "b": [11, 22, 33, 44]}
		]',
		'$[*]' as p
		columns (
			n for ordinality,
			a int path 'lax $.a' default -1 on error,
			nested path 'strict $.b[*]' as pb columns (
				b text format json path '$',
				nested path 'strict $[*]' as pb1 columns (
					b1 int path '$'
				)
			),
			nested path 'strict $.c[*]' as pc columns (
				c text format json path '$',
				nested path 'strict $[*]' as pc1 columns (
					c1 int path '$'
				)
			)
		)
		--plan default(outer, cross)
		plan(p outer ((pb inner pb1) cross (pc outer pc1)))
	) jt;
-- Should succeed (JSON arguments are passed to root and nested paths)
-- $x filters rows in the root path, $y filters rows in the nested path;
-- both come from the lateral generate_series references via PASSING.
SELECT *
FROM
	generate_series(1, 4) x,
	generate_series(1, 3) y,
	JSON_TABLE(jsonb
		'[[1,2,3],[2,3,4,5],[3,4,5,6]]',
		'strict $[*] ? (@[*] < $x)'
		PASSING x AS x, y AS y
		COLUMNS (
			y text FORMAT JSON PATH '$',
			NESTED PATH 'strict $[*] ? (@ >= $y)'
			COLUMNS (
				z int PATH '$'
			)
		)
	) jt;
-- Should fail (JSON arguments are not passed to column paths)
-- $x is visible in the row path but not in the COLUMNS path expression
SELECT *
FROM JSON_TABLE(
	jsonb '[1,2,3]',
	'$[*] ? (@ < $x)'
	PASSING 10 AS x
	COLUMNS (y text FORMAT JSON PATH '$ ? (@ < $x)')
) jt;
-- Extension: non-constant JSON path
-- The path argument is a runtime concatenation ('$' || '.' || 'a'), not a
-- literal; JSON_EXISTS/JSON_VALUE/JSON_QUERY accept this, JSON_TABLE does not.
SELECT JSON_EXISTS(jsonb '{"a": 123}', '$' || '.' || 'a');
SELECT JSON_VALUE(jsonb '{"a": 123}', '$' || '.' || 'a');
SELECT JSON_VALUE(jsonb '{"a": 123}', '$' || '.' || 'b' DEFAULT 'foo' ON EMPTY);
SELECT JSON_QUERY(jsonb '{"a": 123}', '$' || '.' || 'a');
SELECT JSON_QUERY(jsonb '{"a": 123}', '$' || '.' || 'a' WITH WRAPPER);
-- Should fail (invalid path)
SELECT JSON_QUERY(jsonb '{"a": 123}', 'error' || ' ' || 'error');
-- Should fail (not supported)
SELECT * FROM JSON_TABLE(jsonb '{"a": 123}', '$' || '.' || 'a' COLUMNS (foo int));
-- Test parallel JSON_VALUE()
CREATE UNLOGGED TABLE test_parallel_jsonb_value AS
SELECT i::text::jsonb AS js
FROM generate_series(1, 50000) i;
-- encourage use of parallel plans
set parallel_setup_cost=0;
set parallel_tuple_cost=0;
set min_parallel_table_scan_size=0;
set max_parallel_workers_per_gather=4;
set parallel_leader_participation = off;
-- Should be non-parallel due to subtransactions
-- (default ON ERROR handling uses a subtransaction internally)
EXPLAIN (COSTS OFF)
SELECT sum(JSON_VALUE(js, '$' RETURNING numeric)) FROM test_parallel_jsonb_value;
SELECT sum(JSON_VALUE(js, '$' RETURNING numeric)) FROM test_parallel_jsonb_value;
-- Should be parallel
-- (ERROR ON ERROR avoids the subtransaction, so the plan can parallelize)
EXPLAIN (COSTS OFF)
SELECT sum(JSON_VALUE(js, '$' RETURNING numeric ERROR ON ERROR)) FROM test_parallel_jsonb_value;
SELECT sum(JSON_VALUE(js, '$' RETURNING numeric ERROR ON ERROR)) FROM test_parallel_jsonb_value;
DROP TABLE test_parallel_jsonb_value;
-- metasfresh/ADempiere application-dictionary migration (generated by the
-- dictionary migration logger).  This section creates virtual "City" and
-- "Postal" columns on C_BPartner (AD_Table_ID=291) whose ColumnSQL pulls the
-- default billing address; each INSERT is guarded with NOT EXISTS so the
-- script is safe to re-run against repositories that already have the column.
-- 2017-09-17T09:52:31.613
-- Adjusted Insert to make sure that the column is not already created in other repositories
INSERT INTO AD_Column (AD_Client_ID,AD_Column_ID,AD_Element_ID,AD_Org_ID,AD_Reference_ID,AD_Table_ID,AllowZoomTo,ColumnName,ColumnSQL,Created,CreatedBy,DDL_NoForeignKey,Description,EntityType,FieldLength,Help,IsActive,IsAdvancedText,IsAllowLogging,IsAlwaysUpdateable,IsAutocomplete,IsCalculated,IsDimension,IsDLMPartitionBoundary,IsEncrypted,IsGenericZoomKeyColumn,IsGenericZoomOrigin,IsIdentifier,IsKey,IsLazyLoading,IsMandatory,IsParent,IsSelectionColumn,IsStaleable,IsSyncDatabase,IsTranslated,IsUpdateable,IsUseDocSequence,Name,SelectionColumnSeqNo,SeqNo,Updated,UpdatedBy,Version)
SELECT 0,557178,225,0,10,291,'N','City','',TO_TIMESTAMP('2017-09-17 09:52:31','YYYY-MM-DD HH24:MI:SS'),100,'N','Name des Ortes','D',60,'Bezeichnet einen einzelnen Ort in diesem Land oder dieser Region.','Y','N','Y','N','N','N','N','N','N','N','N','N','N','N','N','N','N','N','N','N','Y','N','Ort',0,0,TO_TIMESTAMP('2017-09-17 09:52:31','YYYY-MM-DD HH24:MI:SS'),100,0
WHERE NOT EXISTS (SELECT 1 FROM AD_Column WHERE AD_TABLE_ID = 291 AND ColumnName = 'City')
;
-- 2017-09-17T09:52:31.622
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Column_Trl (AD_Language,AD_Column_ID, Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Column_ID, t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Column t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Column_ID=557178 AND NOT EXISTS (SELECT 1 FROM AD_Column_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Column_ID=t.AD_Column_ID)
;
-- NOTE(review): the quadrupled quotes ('''') in the next two updates store a
-- doubled quote inside ColumnSQL; the stored SQL is corrected by the
-- 2017-09-17T10:08 updates later in this script.
-- 2017-09-17T09:54:04.235
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Column SET ColumnSQL='select city from c_bpartner_location bpl
join c_location l on bpl.c_location_id = l.c_location_id
where isbilltodefault = ''''Y'''' and bpl.c_bpartner_id = C_BPartner.C_BPartner_ID', IsUpdateable='N',Updated=TO_TIMESTAMP('2017-09-17 09:54:04','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Column_ID=557178
;
-- 2017-09-17T09:56:32.029
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Column SET ColumnSQL='select postal from c_bpartner_location bpl
join c_location l on bpl.c_location_id = l.c_location_id
where isbilltodefault = ''''Y'''' and bpl.c_bpartner_id = C_BPartner.C_BPartner_ID', IsUpdateable='N',Updated=TO_TIMESTAMP('2017-09-17 09:56:32','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Column_ID=822
;
-- revert: column 822 gets its ColumnSQL cleared again (a new Postal column
-- 557179 is created below instead)
-- 2017-09-17T09:58:02.187
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Column SET ColumnSQL='',Updated=TO_TIMESTAMP('2017-09-17 09:58:02','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Column_ID=822
;
-- Create virtual Postal (557179) and Address1 (557180) columns on C_BPartner,
-- then iteratively rework their ColumnSQL: collapse to a single line and wrap
-- in parentheses so the expression can be embedded in generated SELECT lists.
-- 2017-09-17T09:58:25.526
-- Adjusted Insert to make sure that the column is not already created in other repositories
INSERT INTO AD_Column (AD_Client_ID,AD_Column_ID,AD_Element_ID,AD_Org_ID,AD_Reference_ID,AD_Table_ID,AllowZoomTo,ColumnName,ColumnSQL,Created,CreatedBy,DDL_NoForeignKey,Description,EntityType,FieldLength,Help,IsActive,IsAdvancedText,IsAllowLogging,IsAlwaysUpdateable,IsAutocomplete,IsCalculated,IsDimension,IsDLMPartitionBoundary,IsEncrypted,IsGenericZoomKeyColumn,IsGenericZoomOrigin,IsIdentifier,IsKey,IsLazyLoading,IsMandatory,IsParent,IsSelectionColumn,IsStaleable,IsSyncDatabase,IsTranslated,IsUpdateable,IsUseDocSequence,Name,SelectionColumnSeqNo,SeqNo,Updated,UpdatedBy,Version)
SELECT 0,557179,512,0,10,291,'N','Postal','select postal from c_bpartner_location bpl
join c_location l on bpl.c_location_id = l.c_location_id
where isbilltodefault = ''''Y'''' and bpl.c_bpartner_id = C_BPartner.C_BPartner_ID',TO_TIMESTAMP('2017-09-17 09:58:25','YYYY-MM-DD HH24:MI:SS'),100,'N','Postleitzahl','D',10,'"PLZ" bezeichnet die Postleitzahl für diese Adresse oder dieses Postfach.','Y','N','Y','N','N','N','N','N','N','N','N','N','N','N','N','N','N','N','N','N','N','N','PLZ',0,0,TO_TIMESTAMP('2017-09-17 09:58:25','YYYY-MM-DD HH24:MI:SS'),100,0
WHERE NOT EXISTS (SELECT 1 FROM AD_Column WHERE AD_TABLE_ID = 291 AND ColumnName = 'Postal')
;
-- 2017-09-17T09:58:25.527
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Column_Trl (AD_Language,AD_Column_ID, Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Column_ID, t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Column t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Column_ID=557179 AND NOT EXISTS (SELECT 1 FROM AD_Column_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Column_ID=t.AD_Column_ID)
;
-- 2017-09-17T09:58:51.596
-- Adjusted Insert to make sure that the column is not already created in other repositories
INSERT INTO AD_Column (AD_Client_ID,AD_Column_ID,AD_Element_ID,AD_Org_ID,AD_Reference_ID,AD_Table_ID,AllowZoomTo,ColumnName,ColumnSQL,Created,CreatedBy,DDL_NoForeignKey,Description,EntityType,FieldLength,Help,IsActive,IsAdvancedText,IsAllowLogging,IsAlwaysUpdateable,IsAutocomplete,IsCalculated,IsDimension,IsDLMPartitionBoundary,IsEncrypted,IsGenericZoomKeyColumn,IsGenericZoomOrigin,IsIdentifier,IsKey,IsLazyLoading,IsMandatory,IsParent,IsSelectionColumn,IsStaleable,IsSyncDatabase,IsTranslated,IsUpdateable,IsUseDocSequence,Name,SelectionColumnSeqNo,SeqNo,Updated,UpdatedBy,Version)
SELECT 0,557180,156,0,10,291,'N','Address1','select address1 from c_bpartner_location bpl
join c_location l on bpl.c_location_id = l.c_location_id
where isbilltodefault = ''''Y'''' and bpl.c_bpartner_id = C_BPartner.C_BPartner_ID',TO_TIMESTAMP('2017-09-17 09:58:51','YYYY-MM-DD HH24:MI:SS'),100,'N','Adresszeile 1 für diesen Standort','D',10,'"Adresszeile 1" gibt die Anschrift für diesen Standort an.','Y','N','Y','N','N','N','N','N','N','N','N','N','N','N','N','N','N','N','N','N','N','N','Straße und Nr.',0,0,TO_TIMESTAMP('2017-09-17 09:58:51','YYYY-MM-DD HH24:MI:SS'),100,0
WHERE NOT EXISTS (SELECT 1 FROM AD_Column WHERE AD_TABLE_ID = 291 AND ColumnName = 'Address1')
;
-- 2017-09-17T09:58:51.600
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Column_Trl (AD_Language,AD_Column_ID, Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Column_ID, t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Column t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Column_ID=557180 AND NOT EXISTS (SELECT 1 FROM AD_Column_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Column_ID=t.AD_Column_ID)
;
-- widen Address1 from the initial FieldLength=10 to 100
-- 2017-09-17T09:58:57.547
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Column SET FieldLength=100,Updated=TO_TIMESTAMP('2017-09-17 09:58:57','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Column_ID=557180
;
-- 2017-09-17T10:01:06.679
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Column SET ColumnSQL='select address1 from c_bpartner_location bpl join c_location l on bpl.c_location_id = l.c_location_id where isbilltodefault = ''''Y'''' and bpl.c_bpartner_id = C_BPartner.C_BPartner_ID',Updated=TO_TIMESTAMP('2017-09-17 10:01:06','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Column_ID=557180
;
-- 2017-09-17T10:01:13.830
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Column SET ColumnSQL='(select city from c_bpartner_location bpl join c_location l on bpl.c_location_id = l.c_location_id where isbilltodefault = ''''Y'''' and bpl.c_bpartner_id = C_BPartner.C_BPartner_ID)',Updated=TO_TIMESTAMP('2017-09-17 10:01:13','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Column_ID=557178
;
-- 2017-09-17T10:01:32.181
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Column SET ColumnSQL='(select address1 from c_bpartner_location bpl join c_location l on bpl.c_location_id = l.c_location_id where isbilltodefault = ''''Y'''' and bpl.c_bpartner_id = C_BPartner.C_BPartner_ID)',Updated=TO_TIMESTAMP('2017-09-17 10:01:32','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Column_ID=557180
;
-- 2017-09-17T10:01:38.134
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Column SET ColumnSQL='(select postal from c_bpartner_location bpl join c_location l on bpl.c_location_id = l.c_location_id where isbilltodefault = ''''Y'''' and bpl.c_bpartner_id = C_BPartner.C_BPartner_ID)',Updated=TO_TIMESTAMP('2017-09-17 10:01:38','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Column_ID=557179
;
-- Window fields for the new virtual columns on tab 540871 (Address1, Postal,
-- City), each guarded against repositories where the field already exists,
-- plus the matching AD_Field_Trl seed rows for all active system languages.
-- 2017-09-17T10:04:28.097
-- Adjusted Insert to make sure that the column is not already created in other repositories
INSERT INTO AD_Field (AD_Client_ID,AD_Column_ID,AD_Field_ID,AD_Org_ID,AD_Tab_ID,ColumnDisplayLength,Created,CreatedBy,Description,DisplayLength,EntityType,Help,IncludedTabHeight,IsActive,IsCentrallyMaintained,IsDisplayed,IsDisplayedGrid,IsEncrypted,IsFieldOnly,IsHeading,IsReadOnly,IsSameLine,Name,SeqNo,SeqNoGrid,SortNo,SpanX,SpanY,Updated,UpdatedBy)
SELECT 0,557180,560290,0,540871,0,TO_TIMESTAMP('2017-09-17 10:04:28','YYYY-MM-DD HH24:MI:SS'),100,'Adresszeile 1 für diesen Standort',60,'D','"Adresszeile 1" gibt die Anschrift für diesen Standort an.',0,'Y','Y','Y','Y','N','N','N','N','N','Straße und Nr.',290,320,0,1,1,TO_TIMESTAMP('2017-09-17 10:04:28','YYYY-MM-DD HH24:MI:SS'),100
WHERE NOT EXISTS (SELECT 1 FROM AD_Field JOIN AD_Column ON AD_Field.AD_Column_ID = AD_Column.AD_Column_ID WHERE AD_Column.AD_TABLE_ID = 291 AND AD_Column.ColumnName = 'Address1')
;
-- 2017-09-17T10:04:28.100
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Field_Trl (AD_Language,AD_Field_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Field_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Field t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Field_ID=560290 AND NOT EXISTS (SELECT 1 FROM AD_Field_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Field_ID=t.AD_Field_ID)
;
-- 2017-09-17T10:04:52.016
-- Adjusted Insert to make sure that the column is not already created in other repositories
INSERT INTO AD_Field (AD_Client_ID,AD_Column_ID,AD_Field_ID,AD_Org_ID,AD_Tab_ID,ColumnDisplayLength,Created,CreatedBy,Description,DisplayLength,EntityType,Help,IncludedTabHeight,IsActive,IsCentrallyMaintained,IsDisplayed,IsDisplayedGrid,IsEncrypted,IsFieldOnly,IsHeading,IsReadOnly,IsSameLine,Name,SeqNo,SeqNoGrid,SortNo,SpanX,SpanY,Updated,UpdatedBy)
SELECT 0,557179,560291,0,540871,0,TO_TIMESTAMP('2017-09-17 10:04:51','YYYY-MM-DD HH24:MI:SS'),100,'Postleitzahl',60,'D','"PLZ" bezeichnet die Postleitzahl für diese Adresse oder dieses Postfach.',0,'Y','Y','Y','Y','N','N','N','N','N','PLZ',300,330,0,1,1,TO_TIMESTAMP('2017-09-17 10:04:51','YYYY-MM-DD HH24:MI:SS'),100
WHERE NOT EXISTS (SELECT 1 FROM AD_Field JOIN AD_Column ON AD_Field.AD_Column_ID = AD_Column.AD_Column_ID WHERE AD_Column.AD_TABLE_ID = 291 AND AD_Column.ColumnName = 'Postal')
;
-- 2017-09-17T10:04:52.018
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Field_Trl (AD_Language,AD_Field_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Field_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Field t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Field_ID=560291 AND NOT EXISTS (SELECT 1 FROM AD_Field_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Field_ID=t.AD_Field_ID)
;
-- 2017-09-17T10:05:13.844
-- Adjusted Insert to make sure that the column is not already created in other repositories
INSERT INTO AD_Field (AD_Client_ID,AD_Column_ID,AD_Field_ID,AD_Org_ID,AD_Tab_ID,ColumnDisplayLength,Created,CreatedBy,Description,DisplayLength,EntityType,Help,IncludedTabHeight,IsActive,IsCentrallyMaintained,IsDisplayed,IsDisplayedGrid,IsEncrypted,IsFieldOnly,IsHeading,IsReadOnly,IsSameLine,Name,SeqNo,SeqNoGrid,SortNo,SpanX,SpanY,Updated,UpdatedBy)
SELECT 0,557178,560292,0,540871,0,TO_TIMESTAMP('2017-09-17 10:05:13','YYYY-MM-DD HH24:MI:SS'),100,'Name des Ortes',60,'D','Bezeichnet einen einzelnen Ort in diesem Land oder dieser Region.',0,'Y','Y','Y','Y','N','N','N','N','N','Ort',310,340,0,1,1,TO_TIMESTAMP('2017-09-17 10:05:13','YYYY-MM-DD HH24:MI:SS'),100
WHERE NOT EXISTS (SELECT 1 FROM AD_Field JOIN AD_Column ON AD_Field.AD_Column_ID = AD_Column.AD_Column_ID WHERE AD_Column.AD_TABLE_ID = 291 AND AD_Column.ColumnName = 'City')
;
-- 2017-09-17T10:05:13.848
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Field_Trl (AD_Language,AD_Field_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Field_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Field t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Field_ID=560292 AND NOT EXISTS (SELECT 1 FROM AD_Field_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Field_ID=t.AD_Field_ID)
;
-- UI layout elements placing the three new fields into element group 541151
-- of tab 540871; each INSERT requires the field to exist and guards against
-- a pre-existing UI element for the same field.
-- 2017-09-17T10:05:49.999
-- Adjusted Insert to make sure that the column is not already created in other repositories
INSERT INTO AD_UI_Element (AD_Client_ID,AD_Field_ID,AD_Org_ID,AD_Tab_ID,AD_UI_ElementGroup_ID,AD_UI_Element_ID,Created,CreatedBy,IsActive,IsAdvancedField,IsDisplayed,IsDisplayedGrid,IsDisplayed_SideList,Name,SeqNo,SeqNoGrid,SeqNo_SideList,Updated,UpdatedBy)
SELECT 0,560290,0,540871,541151,548670,TO_TIMESTAMP('2017-09-17 10:05:49','YYYY-MM-DD HH24:MI:SS'),100,'Y','N','Y','N','N','Strasse und Nr.',60,0,0,TO_TIMESTAMP('2017-09-17 10:05:49','YYYY-MM-DD HH24:MI:SS'),100
WHERE EXISTS (SELECT 1 FROM AD_Field where AD_Field.AD_Field_ID = 560290) AND NOT EXISTS (SELECT 1 FROM AD_UI_Element where AD_Field_ID = 560290)
;
-- 2017-09-17T10:06:01.340
-- Adjusted Insert to make sure that the column is not already created in other repositories
INSERT INTO AD_UI_Element (AD_Client_ID,AD_Field_ID,AD_Org_ID,AD_Tab_ID,AD_UI_ElementGroup_ID,AD_UI_Element_ID,Created,CreatedBy,IsActive,IsAdvancedField,IsDisplayed,IsDisplayedGrid,IsDisplayed_SideList,Name,SeqNo,SeqNoGrid,SeqNo_SideList,Updated,UpdatedBy)
SELECT 0,560291,0,540871,541151,548671,TO_TIMESTAMP('2017-09-17 10:06:01','YYYY-MM-DD HH24:MI:SS'),100,'Y','N','Y','N','N','PLZ',70,0,0,TO_TIMESTAMP('2017-09-17 10:06:01','YYYY-MM-DD HH24:MI:SS'),100
WHERE EXISTS (SELECT 1 FROM AD_Field where AD_Field.AD_Field_ID = 560291) AND NOT EXISTS (SELECT 1 FROM AD_UI_Element where AD_Field_ID = 560291)
;
-- 2017-09-17T10:06:12.465
-- Adjusted Insert to make sure that the column is not already created in other repositories
INSERT INTO AD_UI_Element (AD_Client_ID,AD_Field_ID,AD_Org_ID,AD_Tab_ID,AD_UI_ElementGroup_ID,AD_UI_Element_ID,Created,CreatedBy,IsActive,IsAdvancedField,IsDisplayed,IsDisplayedGrid,IsDisplayed_SideList,Name,SeqNo,SeqNoGrid,SeqNo_SideList,Updated,UpdatedBy)
SELECT 0,560292,0,540871,541151,548672,TO_TIMESTAMP('2017-09-17 10:06:12','YYYY-MM-DD HH24:MI:SS'),100,'Y','N','Y','N','N','Ort',80,0,0,TO_TIMESTAMP('2017-09-17 10:06:12','YYYY-MM-DD HH24:MI:SS'),100
WHERE EXISTS (SELECT 1 FROM AD_Field where AD_Field.AD_Field_ID = 560292) AND NOT EXISTS (SELECT 1 FROM AD_UI_Element where AD_Field_ID = 560292)
;
-- Quote fix: rewrite the three virtual columns' ColumnSQL using '' (a single
-- escaped quote) instead of the earlier '''' so that the SQL stored in
-- ColumnSQL reads isbilltodefault = 'Y' when executed.
-- 2017-09-17T10:08:14.609
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Column SET ColumnSQL='(select address1 from c_bpartner_location bpl join c_location l on bpl.c_location_id = l.c_location_id where isbilltodefault = ''Y'' and bpl.c_bpartner_id = C_BPartner.C_BPartner_ID)',Updated=TO_TIMESTAMP('2017-09-17 10:08:14','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Column_ID=557180
;
-- 2017-09-17T10:08:42.380
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Column SET ColumnSQL='(select city from c_bpartner_location bpl join c_location l on bpl.c_location_id = l.c_location_id where isbilltodefault = ''Y''
and bpl.c_bpartner_id = C_BPartner.C_BPartner_ID)',Updated=TO_TIMESTAMP('2017-09-17 10:08:42','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Column_ID=557178
;
-- 2017-09-17T10:08:57.323
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Column SET ColumnSQL='(select postal from c_bpartner_location bpl join c_location l on bpl.c_location_id = l.c_location_id where isbilltodefault = ''Y'' and bpl.c_bpartner_id = C_BPartner.C_BPartner_ID)',Updated=TO_TIMESTAMP('2017-09-17 10:08:57','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Column_ID=557179
;
-- New plain (non-virtual) EMail column (557181) on C_BPartner, with the
-- standard NOT EXISTS guard and translation seed rows.
-- 2017-09-17T10:11:10.233
-- Adjusted Insert to make sure that the column is not already created in other repositories
INSERT INTO AD_Column (AD_Client_ID,AD_Column_ID,AD_Element_ID,AD_Org_ID,AD_Reference_ID,AD_Table_ID,AllowZoomTo,ColumnName,Created,CreatedBy,DDL_NoForeignKey,Description,EntityType,FieldLength,Help,IsActive,IsAdvancedText,IsAllowLogging,IsAlwaysUpdateable,IsAutocomplete,IsCalculated,IsDimension,IsDLMPartitionBoundary,IsEncrypted,IsGenericZoomKeyColumn,IsGenericZoomOrigin,IsIdentifier,IsKey,IsLazyLoading,IsMandatory,IsParent,IsSelectionColumn,IsStaleable,IsSyncDatabase,IsTranslated,IsUpdateable,IsUseDocSequence,Name,SelectionColumnSeqNo,SeqNo,Updated,UpdatedBy,Version)
SELECT 0,557181,881,0,10,291,'N','EMail',TO_TIMESTAMP('2017-09-17 10:11:10','YYYY-MM-DD HH24:MI:SS'),100,'N','EMail-Adresse','D',200,'The Email Address is the Electronic Mail ID for this User and should be fully qualified (e.g. joe.smith@company.com). The Email Address is used to access the self service application functionality from the web.','Y','N','Y','N','N','N','N','N','N','N','N','N','N','N','N','N','N','N','N','N','Y','N','eMail',0,0,TO_TIMESTAMP('2017-09-17 10:11:10','YYYY-MM-DD HH24:MI:SS'),100,0
WHERE NOT EXISTS (SELECT 1 FROM AD_Column WHERE AD_TABLE_ID = 291 AND ColumnName = 'EMail')
;
-- 2017-09-17T10:11:10.237
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_Column_Trl (AD_Language,AD_Column_ID, Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Column_ID, t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Column t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Column_ID=557181 AND NOT EXISTS (SELECT 1 FROM AD_Column_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Column_ID=t.AD_Column_ID)
;
-- Migration log segment: surface the new EMail column on the UI — create the
-- AD_Field (560293) on tab 540871, its translations, and its AD_UI_Element.
-- All inserts are guarded with NOT EXISTS so the script is safe to re-run.
-- 2017-09-17T10:12:17.828
-- Adjusted Insert to make sure that the column is not already created in other repositories
INSERT INTO AD_Field (AD_Client_ID,AD_Column_ID,AD_Field_ID,AD_Org_ID,AD_Tab_ID,ColumnDisplayLength,Created,CreatedBy,Description,DisplayLength,EntityType,Help,IncludedTabHeight,IsActive,IsCentrallyMaintained,IsDisplayed,IsDisplayedGrid,IsEncrypted,IsFieldOnly,IsHeading,IsReadOnly,IsSameLine,Name,SeqNo,SeqNoGrid,SortNo,SpanX,SpanY,Updated,UpdatedBy)
SELECT 0,557181,560293,0,540871,0,TO_TIMESTAMP('2017-09-17 10:12:17','YYYY-MM-DD HH24:MI:SS'),100,'EMail-Adresse',0,'D','The Email Address is the Electronic Mail ID for this User and should be fully qualified (e.g. joe.smith@company.com). The Email Address is used to access the self service application functionality from the web.',0,'Y','Y','Y','Y','N','N','N','N','N','eMail',320,350,0,1,1,TO_TIMESTAMP('2017-09-17 10:12:17','YYYY-MM-DD HH24:MI:SS'),100
WHERE NOT EXISTS (SELECT 1 FROM AD_Field JOIN AD_Column ON AD_Field.AD_Column_ID = AD_Column.AD_Column_ID WHERE AD_Column.AD_TABLE_ID = 291 AND AD_Column.ColumnName = 'EMail')
;
-- 2017-09-17T10:12:17.831
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
-- Seed translation rows for the new field in every active system language.
INSERT INTO AD_Field_Trl (AD_Language,AD_Field_ID, Description,Help,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy) SELECT l.AD_Language,t.AD_Field_ID, t.Description,t.Help,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Field t WHERE l.IsActive='Y' AND l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N' AND t.AD_Field_ID=560293 AND NOT EXISTS (SELECT 1 FROM AD_Field_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Field_ID=t.AD_Field_ID)
;
-- 2017-09-17T10:12:32.086
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
-- Place the field in UI element group 541151 (only if the field exists and
-- no UI element was created for it yet).
INSERT INTO AD_UI_Element (AD_Client_ID,AD_Field_ID,AD_Org_ID,AD_Tab_ID,AD_UI_ElementGroup_ID,AD_UI_Element_ID,Created,CreatedBy,IsActive,IsAdvancedField,IsDisplayed,IsDisplayedGrid,IsDisplayed_SideList,Name,SeqNo,SeqNoGrid,SeqNo_SideList,Updated,UpdatedBy)
SELECT 0,560293,0,540871,541151,548673,TO_TIMESTAMP('2017-09-17 10:12:32','YYYY-MM-DD HH24:MI:SS'),100,'Y','N','Y','N','N','eMail',90,0,0,TO_TIMESTAMP('2017-09-17 10:12:32','YYYY-MM-DD HH24:MI:SS'),100
WHERE EXISTS (SELECT 1 FROM AD_Field where AD_Field.AD_Field_ID = 560293) AND NOT EXISTS (SELECT 1 FROM AD_UI_Element where AD_Field_ID = 560293)
;
-- Migration log segment: temporarily clear the ColumnSQL of the three virtual
-- columns so physical columns can be added, then add the physical columns via
-- the db_alter_table() helper (which presumably skips already-existing
-- columns — TODO confirm against the helper's definition).
-- 2017-09-17T10:12:46.854
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Column SET ColumnSQL='',Updated=TO_TIMESTAMP('2017-09-17 10:12:46','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Column_ID=557180
;
-- 2017-09-17T10:12:56.623
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Column SET ColumnSQL='',Updated=TO_TIMESTAMP('2017-09-17 10:12:56','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Column_ID=557178
;
-- 2017-09-17T10:13:05.762
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Column SET ColumnSQL='',Updated=TO_TIMESTAMP('2017-09-17 10:13:05','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Column_ID=557179
;
-- 2017-09-17T10:13:29.595
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
/* DDL */ SELECT public.db_alter_table('c_bpartner','ALTER TABLE public.C_BPartner ADD COLUMN Postal VARCHAR(10)')
;
-- 2017-09-17T10:13:40.539
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
/* DDL */ SELECT public.db_alter_table('c_bpartner','ALTER TABLE public.C_BPartner ADD COLUMN EMail VARCHAR(200)')
;
-- 2017-09-17T10:14:05.103
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
/* DDL */ SELECT public.db_alter_table('c_bpartner','ALTER TABLE public.C_BPartner ADD COLUMN City VARCHAR(60)')
;
-- 2017-09-17T10:14:13.892
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
/* DDL */ SELECT public.db_alter_table('c_bpartner','ALTER TABLE public.C_BPartner ADD COLUMN Address1 VARCHAR(100)')
;
-- Migration log segment: restore the ColumnSQL expressions. Note the history
-- preserved here: the 10:15:54 and 10:18:41 entries over-escape the quotes
-- (''''Y'''' stores a literal ''Y'' inside ColumnSQL), and each is corrected
-- by the immediately following entry using the proper ''Y'' escaping. Do not
-- "fix" the earlier entries — this log records what was actually executed.
-- 2017-09-17T10:15:54.595
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Column SET ColumnSQL='(select city from c_bpartner_location bpl
join c_location l on bpl.c_location_id = l.c_location_id
where isbilltodefault = ''''Y'''' and bpl.c_bpartner_id = C_BPartner.C_BPartner_ID )',Updated=TO_TIMESTAMP('2017-09-17 10:15:54','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Column_ID=557178
;
-- 2017-09-17T10:17:28.171
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
-- (corrects the quoting of the previous statement for City, 557178)
UPDATE AD_Column SET ColumnSQL='(select city from c_bpartner_location bpl
join c_location l on bpl.c_location_id = l.c_location_id
where isbilltodefault = ''Y'' and bpl.c_bpartner_id = C_BPartner.C_BPartner_ID )',Updated=TO_TIMESTAMP('2017-09-17 10:17:28','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Column_ID=557178
;
-- 2017-09-17T10:18:41.339
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Column SET ColumnSQL='(select postal from c_bpartner_location bpl
join c_location l on bpl.c_location_id = l.c_location_id
where isbilltodefault = ''''Y'''' and bpl.c_bpartner_id = C_BPartner.C_BPartner_ID )',Updated=TO_TIMESTAMP('2017-09-17 10:18:41','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Column_ID=557179
;
-- 2017-09-17T10:18:50.832
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
-- (corrects the quoting of the previous statement for Postal, 557179)
UPDATE AD_Column SET ColumnSQL='(select postal from c_bpartner_location bpl
join c_location l on bpl.c_location_id = l.c_location_id
where isbilltodefault = ''Y'' and bpl.c_bpartner_id = C_BPartner.C_BPartner_ID )',Updated=TO_TIMESTAMP('2017-09-17 10:18:50','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Column_ID=557179
;
-- 2017-09-17T10:19:25.133
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Column SET ColumnSQL='(select address1 from c_bpartner_location bpl join c_location l on bpl.c_location_id = l.c_location_id where isbilltodefault = ''Y'' and bpl.c_bpartner_id = C_BPartner.C_BPartner_ID )',Updated=TO_TIMESTAMP('2017-09-17 10:19:25','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Column_ID=557180
;
-- 2017-09-17T11:06:33.454
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
-- Make C_BPartner.EMail a read-only virtual column taken from the partner's
-- active default contact (AD_User). IsUpdateable='N' because ColumnSQL
-- columns cannot be written.
-- FIX(review): removed dataset-extraction residue ("| the_stack") that
-- followed the statement terminator and broke parsing of this script.
UPDATE AD_Column SET ColumnSQL='(SELECT Email FROM AD_User u WHERE u.C_BPartner_ID=C_BPartner.C_BPartner_ID AND u.IsActive=''Y'' AND u.IsDefaultContact=''Y'')', IsUpdateable='N',Updated=TO_TIMESTAMP('2017-09-17 11:06:33','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Column_ID=557181
;
--
-- FLOAT8
--
-- Regression-test setup: create the duckdb_fdw extension, a server backed by
-- a SQLite database file, and two foreign tables used throughout the tests.
-- NOTE(review): this is a regression script compared against an expected
-- output file — statements (including deliberate errors) must stay verbatim.
--Testcase 113:
CREATE EXTENSION duckdb_fdw;
--Testcase 114:
CREATE SERVER sqlite_svr FOREIGN DATA WRAPPER duckdb_fdw
OPTIONS (database '/tmp/sqlitefdw_test_core.db');
--Testcase 115:
CREATE FOREIGN TABLE FLOAT8_TBL(f1 float8 OPTIONS (key 'true')) SERVER sqlite_svr;
--Testcase 116:
CREATE FOREIGN TABLE FLOAT8_TMP(f1 float8, f2 float8, id int OPTIONS (key 'true')) SERVER sqlite_svr;
-- Valid literals, including surrounding whitespace and exponent forms.
--Testcase 1:
INSERT INTO FLOAT8_TBL(f1) VALUES (' 0.0 ');
--Testcase 2:
INSERT INTO FLOAT8_TBL(f1) VALUES ('1004.30 ');
--Testcase 3:
INSERT INTO FLOAT8_TBL(f1) VALUES (' -34.84');
--Testcase 4:
INSERT INTO FLOAT8_TBL(f1) VALUES ('1.2345678901234e+200');
--Testcase 5:
INSERT INTO FLOAT8_TBL(f1) VALUES ('1.2345678901234e-200');
-- test for underflow and overflow handling
-- (these literals are out of float8 range, so the casts are expected to error)
--Testcase 6:
INSERT INTO FLOAT8_TMP(f1) VALUES ('10e400'::float8);
--Testcase 7:
INSERT INTO FLOAT8_TMP(f1) VALUES ('-10e400'::float8);
--Testcase 8:
INSERT INTO FLOAT8_TMP(f1) VALUES ('10e-400'::float8);
--Testcase 9:
INSERT INTO FLOAT8_TMP(f1) VALUES ('-10e-400'::float8);
-- test smallest normalized input
--Testcase 117:
INSERT INTO FLOAT8_TMP(f1) VALUES ('2.2250738585072014E-308'::float8);
--Testcase 118:
SELECT float8send(f1) FROM FLOAT8_TMP;
-- bad input
-- Each of these malformed literals is expected to raise an input-syntax error.
--Testcase 10:
INSERT INTO FLOAT8_TBL(f1) VALUES ('');
--Testcase 11:
INSERT INTO FLOAT8_TBL(f1) VALUES (' ');
--Testcase 12:
INSERT INTO FLOAT8_TBL(f1) VALUES ('xyz');
--Testcase 13:
INSERT INTO FLOAT8_TBL(f1) VALUES ('5.0.0');
--Testcase 14:
INSERT INTO FLOAT8_TBL(f1) VALUES ('5 . 0');
--Testcase 15:
INSERT INTO FLOAT8_TBL(f1) VALUES ('5. 0');
--Testcase 16:
INSERT INTO FLOAT8_TBL(f1) VALUES (' - 3');
--Testcase 17:
INSERT INTO FLOAT8_TBL(f1) VALUES ('123 5');
-- special inputs
-- NaN/Infinity spellings are case-insensitive and tolerate surrounding blanks.
-- Pattern for each case: clear FLOAT8_TMP, insert the value, read it back.
--Testcase 19:
DELETE FROM FLOAT8_TMP;
--Testcase 119:
INSERT INTO FLOAT8_TMP VALUES ('NaN'::float8);
--Testcase 120:
SELECT f1 FROM FLOAT8_TMP;
--Testcase 20:
DELETE FROM FLOAT8_TMP;
--Testcase 121:
INSERT INTO FLOAT8_TMP VALUES ('nan'::float8);
--Testcase 122:
SELECT f1 FROM FLOAT8_TMP;
--Testcase 21:
DELETE FROM FLOAT8_TMP;
--Testcase 123:
INSERT INTO FLOAT8_TMP VALUES (' NAN '::float8);
--Testcase 124:
SELECT f1 FROM FLOAT8_TMP;
--Testcase 22:
DELETE FROM FLOAT8_TMP;
--Testcase 125:
INSERT INTO FLOAT8_TMP VALUES ('infinity'::float8);
--Testcase 126:
SELECT f1 FROM FLOAT8_TMP;
--Testcase 23:
DELETE FROM FLOAT8_TMP;
--Testcase 127:
INSERT INTO FLOAT8_TMP VALUES (' -INFINiTY '::float8);
--Testcase 128:
SELECT f1 FROM FLOAT8_TMP;
-- bad special inputs
-- These misspellings must be rejected by float8in.
--Testcase 25:
DELETE FROM FLOAT8_TMP;
--Testcase 129:
INSERT INTO FLOAT8_TMP VALUES ('N A N'::float8);
--Testcase 130:
SELECT f1 FROM FLOAT8_TMP;
--Testcase 26:
DELETE FROM FLOAT8_TMP;
--Testcase 131:
INSERT INTO FLOAT8_TMP VALUES ('NaN x'::float8);
--Testcase 132:
SELECT f1 FROM FLOAT8_TMP;
--Testcase 27:
DELETE FROM FLOAT8_TMP;
--Testcase 133:
INSERT INTO FLOAT8_TMP VALUES (' INFINITY x'::float8);
--Testcase 134:
SELECT f1 FROM FLOAT8_TMP;
-- Arithmetic with infinities and NaN.
--Testcase 28:
DELETE FROM FLOAT8_TMP;
--Testcase 135:
INSERT INTO FLOAT8_TMP VALUES ('Infinity'::float8 + 100.0);
--Testcase 136:
SELECT f1 FROM FLOAT8_TMP;
--Testcase 30:
DELETE FROM FLOAT8_TMP;
--Testcase 137:
INSERT INTO FLOAT8_TMP VALUES ('Infinity'::float8 / 'Infinity'::float8);
--Testcase 138:
SELECT f1 FROM FLOAT8_TMP;
--Testcase 31:
DELETE FROM FLOAT8_TMP;
--Testcase 139:
INSERT INTO FLOAT8_TMP VALUES ('nan'::float8 / 'nan'::float8);
--Testcase 140:
SELECT f1 FROM FLOAT8_TMP;
--Testcase 32:
DELETE FROM FLOAT8_TMP;
--Testcase 141:
INSERT INTO FLOAT8_TMP VALUES ('nan'::numeric::float8);
--Testcase 142:
SELECT f1 FROM FLOAT8_TMP;
-- Comparison operators against the five seeded values, then basic arithmetic
-- and math functions (abs, trunc, round, ceil/floor, sign, sqrt).
--Testcase 34:
SELECT '' AS five, * FROM FLOAT8_TBL;
--Testcase 35:
SELECT '' AS four, f.* FROM FLOAT8_TBL f WHERE f.f1 <> '1004.3';
--Testcase 36:
SELECT '' AS one, f.* FROM FLOAT8_TBL f WHERE f.f1 = '1004.3';
--Testcase 37:
SELECT '' AS three, f.* FROM FLOAT8_TBL f WHERE '1004.3' > f.f1;
--Testcase 38:
SELECT '' AS three, f.* FROM FLOAT8_TBL f WHERE f.f1 < '1004.3';
--Testcase 39:
SELECT '' AS four, f.* FROM FLOAT8_TBL f WHERE '1004.3' >= f.f1;
--Testcase 40:
SELECT '' AS four, f.* FROM FLOAT8_TBL f WHERE f.f1 <= '1004.3';
-- Arithmetic restricted to the positive values.
--Testcase 41:
SELECT '' AS three, f.f1, f.f1 * '-10' AS x
   FROM FLOAT8_TBL f
   WHERE f.f1 > '0.0';
--Testcase 42:
SELECT '' AS three, f.f1, f.f1 + '-10' AS x
   FROM FLOAT8_TBL f
   WHERE f.f1 > '0.0';
--Testcase 43:
SELECT '' AS three, f.f1, f.f1 / '-10' AS x
   FROM FLOAT8_TBL f
   WHERE f.f1 > '0.0';
--Testcase 44:
SELECT '' AS three, f.f1, f.f1 - '-10' AS x
   FROM FLOAT8_TBL f
   WHERE f.f1 > '0.0';
--Testcase 45:
SELECT '' AS one, f.f1 ^ '2.0' AS square_f1
   FROM FLOAT8_TBL f where f.f1 = '1004.3';
-- absolute value
--Testcase 46:
SELECT '' AS five, f.f1, @f.f1 AS abs_f1
   FROM FLOAT8_TBL f;
-- truncate
--Testcase 47:
SELECT '' AS five, f.f1, trunc(f.f1) AS trunc_f1
   FROM FLOAT8_TBL f;
-- round
--Testcase 48:
SELECT '' AS five, f.f1, round(f.f1) AS round_f1
   FROM FLOAT8_TBL f;
-- ceil / ceiling
--Testcase 49:
select ceil(f1) as ceil_f1 from float8_tbl f;
--Testcase 50:
select ceiling(f1) as ceiling_f1 from float8_tbl f;
-- floor
--Testcase 51:
select floor(f1) as floor_f1 from float8_tbl f;
-- sign
--Testcase 52:
select sign(f1) as sign_f1 from float8_tbl f;
-- avoid bit-exact output here because operations may not be bit-exact.
SET extra_float_digits = 0;
-- square root
-- The BEGIN/ROLLBACK keeps the temporary table contents unchanged.
BEGIN;
--Testcase 53:
DELETE FROM FLOAT8_TBL;
--Testcase 54:
INSERT INTO FLOAT8_TBL VALUES ('64'::float8);
--Testcase 55:
SELECT sqrt(f1) as eight FROM FLOAT8_TBL;
--Testcase 56:
SELECT |/f1 as eight FROM FLOAT8_TBL;
ROLLBACK;
--Testcase 57:
SELECT '' AS three, f.f1, |/f.f1 AS sqrt_f1
   FROM FLOAT8_TBL f
   WHERE f.f1 > '0.0';
-- power
-- Each case: clear FLOAT8_TMP, insert (base, exponent), evaluate power(f1, f2).
-- Covers NaN propagation and the IEEE special cases 1^NaN and NaN^0.
--Testcase 58:
DELETE FROM FLOAT8_TMP;
--Testcase 143:
INSERT INTO FLOAT8_TMP VALUES ('144'::float8, '0.5'::float8);
--Testcase 144:
SELECT power(f1, f2) FROM FLOAT8_TMP;
--Testcase 60:
DELETE FROM FLOAT8_TMP;
--Testcase 145:
INSERT INTO FLOAT8_TMP VALUES ('NaN'::float8, '0.5'::float8);
--Testcase 146:
SELECT power(f1, f2) FROM FLOAT8_TMP;
--Testcase 61:
DELETE FROM FLOAT8_TMP;
--Testcase 147:
INSERT INTO FLOAT8_TMP VALUES ('144'::float8, 'NaN'::float8);
--Testcase 148:
SELECT power(f1, f2) FROM FLOAT8_TMP;
--Testcase 62:
DELETE FROM FLOAT8_TMP;
--Testcase 149:
INSERT INTO FLOAT8_TMP VALUES ('NaN'::float8, 'NaN'::float8);
--Testcase 150:
SELECT power(f1, f2) FROM FLOAT8_TMP;
--Testcase 63:
DELETE FROM FLOAT8_TMP;
--Testcase 151:
INSERT INTO FLOAT8_TMP VALUES ('-1'::float8, 'NaN'::float8);
--Testcase 152:
SELECT power(f1, f2) FROM FLOAT8_TMP;
--Testcase 64:
DELETE FROM FLOAT8_TMP;
--Testcase 153:
INSERT INTO FLOAT8_TMP VALUES ('1'::float8, 'NaN'::float8);
--Testcase 154:
SELECT power(f1, f2) FROM FLOAT8_TMP;
--Testcase 65:
DELETE FROM FLOAT8_TMP;
--Testcase 155:
INSERT INTO FLOAT8_TMP VALUES ('NaN'::float8 , '0'::float8);
--Testcase 156:
SELECT power(f1, f2) FROM FLOAT8_TMP;
-- take exp of ln(f.f1)
--Testcase 67:
SELECT '' AS three, f.f1, exp(ln(f.f1)) AS exp_ln_f1
   FROM FLOAT8_TBL f
   WHERE f.f1 > '0.0';
-- cube root
BEGIN;
--Testcase 68:
DELETE FROM FLOAT8_TBL;
--Testcase 69:
INSERT INTO FLOAT8_TBL VALUES ('27'::float8);
--Testcase 70:
SELECT ||/f1 as three FROM FLOAT8_TBL;
ROLLBACK;
--Testcase 71:
SELECT '' AS five, f.f1, ||/f.f1 AS cbrt_f1 FROM FLOAT8_TBL f;
--Testcase 72:
SELECT '' AS five, * FROM FLOAT8_TBL;
-- Flip signs of the positive rows, then exercise error paths: overflow,
-- ln of zero/negative, exp overflow, and division by zero.
--Testcase 73:
UPDATE FLOAT8_TBL
   SET f1 = FLOAT8_TBL.f1 * '-1'
   WHERE FLOAT8_TBL.f1 > '0.0';
--Testcase 74:
SELECT '' AS bad, f.f1 * '1e200' from FLOAT8_TBL f;
--Testcase 75:
SELECT '' AS bad, f.f1 ^ '1e200' from FLOAT8_TBL f;
BEGIN;
--Testcase 76:
DELETE FROM FLOAT8_TBL;
--Testcase 77:
INSERT INTO FLOAT8_TBL VALUES (0 ^ 0 + 0 ^ 1 + 0 ^ 0.0 + 0 ^ 0.5);
--Testcase 78:
SELECT * FROM FLOAT8_TBL;
ROLLBACK;
--Testcase 79:
SELECT '' AS bad, ln(f.f1) from FLOAT8_TBL f where f.f1 = '0.0' ;
--Testcase 80:
SELECT '' AS bad, ln(f.f1) from FLOAT8_TBL f where f.f1 < '0.0' ;
--Testcase 81:
SELECT '' AS bad, exp(f.f1) from FLOAT8_TBL f;
--Testcase 82:
SELECT '' AS bad, f.f1 / '0.0' from FLOAT8_TBL f;
--Testcase 83:
SELECT '' AS five, * FROM FLOAT8_TBL;
-- hyperbolic functions
-- we run these with extra_float_digits = 0 too, since different platforms
-- tend to produce results that vary in the last place.
-- Ordinary arguments first, then Inf/NaN edge cases for each function.
--Testcase 157:
DELETE FROM FLOAT8_TMP;
--Testcase 158:
INSERT INTO FLOAT8_TMP(f1) VALUES (1);
--Testcase 159:
SELECT sinh(f1) FROM FLOAT8_TMP;
--Testcase 160:
SELECT cosh(f1) FROM FLOAT8_TMP;
--Testcase 161:
SELECT tanh(f1) FROM FLOAT8_TMP;
--Testcase 162:
SELECT asinh(f1) FROM FLOAT8_TMP;
--Testcase 163:
DELETE FROM FLOAT8_TMP;
--Testcase 164:
INSERT INTO FLOAT8_TMP(f1) VALUES (2);
--Testcase 165:
SELECT acosh(f1) FROM FLOAT8_TMP;
--Testcase 166:
DELETE FROM FLOAT8_TMP;
--Testcase 167:
INSERT INTO FLOAT8_TMP(f1) VALUES (0.5);
--Testcase 168:
SELECT atanh(f1) FROM FLOAT8_TMP;
-- test Inf/NaN cases for hyperbolic functions
--Testcase 169:
DELETE FROM FLOAT8_TMP;
--Testcase 170:
INSERT INTO FLOAT8_TMP(f1) VALUES (float8 'infinity');
--Testcase 171:
SELECT sinh(f1) FROM FLOAT8_TMP;
--Testcase 172:
DELETE FROM FLOAT8_TMP;
--Testcase 173:
INSERT INTO FLOAT8_TMP(f1) VALUES (float8 '-infinity');
--Testcase 174:
SELECT sinh(f1) FROM FLOAT8_TMP;
--Testcase 175:
DELETE FROM FLOAT8_TMP;
--Testcase 176:
INSERT INTO FLOAT8_TMP(f1) VALUES (float8 'nan');
--Testcase 177:
SELECT sinh(f1) FROM FLOAT8_TMP;
--Testcase 178:
DELETE FROM FLOAT8_TMP;
--Testcase 179:
INSERT INTO FLOAT8_TMP(f1) VALUES (float8 'infinity');
--Testcase 180:
SELECT cosh(f1) FROM FLOAT8_TMP;
--Testcase 181:
DELETE FROM FLOAT8_TMP;
--Testcase 182:
INSERT INTO FLOAT8_TMP(f1) VALUES (float8 '-infinity');
--Testcase 183:
SELECT cosh(f1) FROM FLOAT8_TMP;
--Testcase 184:
DELETE FROM FLOAT8_TMP;
--Testcase 185:
INSERT INTO FLOAT8_TMP(f1) VALUES (float8 'nan');
--Testcase 186:
SELECT cosh(f1) FROM FLOAT8_TMP;
--Testcase 187:
DELETE FROM FLOAT8_TMP;
--Testcase 188:
INSERT INTO FLOAT8_TMP(f1) VALUES (float8 'infinity');
--Testcase 189:
SELECT tanh(f1) FROM FLOAT8_TMP;
--Testcase 190:
DELETE FROM FLOAT8_TMP;
--Testcase 191:
INSERT INTO FLOAT8_TMP(f1) VALUES (float8 '-infinity');
--Testcase 192:
SELECT tanh(f1) FROM FLOAT8_TMP;
--Testcase 193:
DELETE FROM FLOAT8_TMP;
--Testcase 194:
INSERT INTO FLOAT8_TMP(f1) VALUES (float8 'nan');
--Testcase 195:
SELECT tanh(f1) FROM FLOAT8_TMP;
--Testcase 196:
DELETE FROM FLOAT8_TMP;
--Testcase 197:
INSERT INTO FLOAT8_TMP(f1) VALUES (float8 'infinity');
--Testcase 198:
SELECT asinh(f1) FROM FLOAT8_TMP;
--Testcase 199:
DELETE FROM FLOAT8_TMP;
--Testcase 200:
INSERT INTO FLOAT8_TMP(f1) VALUES (float8 '-infinity');
--Testcase 201:
SELECT asinh(f1) FROM FLOAT8_TMP;
--Testcase 202:
DELETE FROM FLOAT8_TMP;
--Testcase 203:
INSERT INTO FLOAT8_TMP(f1) VALUES (float8 'nan');
--Testcase 204:
SELECT asinh(f1) FROM FLOAT8_TMP;
-- acosh(Inf) should be Inf, but some mingw versions produce NaN, so skip test
-- SELECT acosh(float8 'infinity');
--Testcase 205:
DELETE FROM FLOAT8_TMP;
--Testcase 206:
INSERT INTO FLOAT8_TMP VALUES (float8 '-infinity');
--Testcase 207:
SELECT acosh(f1) FROM FLOAT8_TMP;
--Testcase 208:
DELETE FROM FLOAT8_TMP;
--Testcase 209:
INSERT INTO FLOAT8_TMP VALUES ((float8 'nan'));
--Testcase 210:
SELECT acosh(f1) FROM FLOAT8_TMP;
--Testcase 211:
DELETE FROM FLOAT8_TMP;
--Testcase 212:
INSERT INTO FLOAT8_TMP VALUES ((float8 'infinity'));
--Testcase 213:
SELECT atanh(f1) FROM FLOAT8_TMP;
--Testcase 214:
DELETE FROM FLOAT8_TMP;
--Testcase 215:
INSERT INTO FLOAT8_TMP VALUES ((float8 '-infinity'));
--Testcase 216:
SELECT atanh(f1) FROM FLOAT8_TMP;
--Testcase 217:
DELETE FROM FLOAT8_TMP;
--Testcase 218:
INSERT INTO FLOAT8_TMP VALUES ((float8 'nan'));
--Testcase 219:
SELECT atanh(f1) FROM FLOAT8_TMP;
RESET extra_float_digits;
-- test for over- and underflow
-- (expected to raise out-of-range errors on insert)
--Testcase 84:
INSERT INTO FLOAT8_TBL(f1) VALUES ('10e400');
--Testcase 85:
INSERT INTO FLOAT8_TBL(f1) VALUES ('-10e400');
--Testcase 86:
INSERT INTO FLOAT8_TBL(f1) VALUES ('10e-400');
--Testcase 87:
INSERT INTO FLOAT8_TBL(f1) VALUES ('-10e-400');
-- maintain external table consistency across platforms
-- delete all values and reinsert well-behaved ones
--Testcase 88:
DELETE FROM FLOAT8_TBL;
--Testcase 89:
INSERT INTO FLOAT8_TBL(f1) VALUES ('0.0');
--Testcase 90:
INSERT INTO FLOAT8_TBL(f1) VALUES ('-34.84');
--Testcase 91:
INSERT INTO FLOAT8_TBL(f1) VALUES ('-1004.30');
--Testcase 92:
INSERT INTO FLOAT8_TBL(f1) VALUES ('-1.2345678901234e+200');
--Testcase 93:
INSERT INTO FLOAT8_TBL(f1) VALUES ('-1.2345678901234e-200');
--Testcase 94:
SELECT '' AS five, * FROM FLOAT8_TBL;
-- test edge-case coercions to integer
-- Values just inside/outside the int2, int4 and int8 ranges; the
-- out-of-range casts are expected to error.
--Testcase 220:
DELETE FROM FLOAT8_TMP;
--Testcase 221:
INSERT INTO FLOAT8_TMP VALUES ('32767.4'::float8);
--Testcase 222:
SELECT f1::int2 FROM FLOAT8_TMP;
--Testcase 223:
DELETE FROM FLOAT8_TMP;
--Testcase 224:
INSERT INTO FLOAT8_TMP VALUES ('32767.6'::float8);
--Testcase 225:
SELECT f1::int2 FROM FLOAT8_TMP;
--Testcase 226:
DELETE FROM FLOAT8_TMP;
--Testcase 227:
INSERT INTO FLOAT8_TMP VALUES ('-32768.4'::float8);
--Testcase 228:
SELECT f1::int2 FROM FLOAT8_TMP;
--Testcase 229:
DELETE FROM FLOAT8_TMP;
--Testcase 230:
INSERT INTO FLOAT8_TMP VALUES ('-32768.6'::float8);
--Testcase 231:
SELECT f1::int2 FROM FLOAT8_TMP;
--Testcase 232:
DELETE FROM FLOAT8_TMP;
--Testcase 233:
INSERT INTO FLOAT8_TMP VALUES ('2147483647.4'::float8);
--Testcase 234:
SELECT f1::int4 FROM FLOAT8_TMP;
--Testcase 235:
DELETE FROM FLOAT8_TMP;
--Testcase 236:
INSERT INTO FLOAT8_TMP VALUES ('2147483647.6'::float8);
--Testcase 237:
SELECT f1::int4 FROM FLOAT8_TMP;
--Testcase 238:
DELETE FROM FLOAT8_TMP;
--Testcase 239:
INSERT INTO FLOAT8_TMP VALUES ('-2147483648.4'::float8);
--Testcase 240:
SELECT f1::int4 FROM FLOAT8_TMP;
--Testcase 241:
DELETE FROM FLOAT8_TMP;
--Testcase 242:
INSERT INTO FLOAT8_TMP VALUES ('-2147483648.6'::float8);
--Testcase 243:
SELECT f1::int4 FROM FLOAT8_TMP;
--Testcase 244:
DELETE FROM FLOAT8_TMP;
--Testcase 245:
INSERT INTO FLOAT8_TMP VALUES ('9223372036854773760'::float8);
--Testcase 246:
SELECT f1::int8 FROM FLOAT8_TMP;
--Testcase 247:
DELETE FROM FLOAT8_TMP;
--Testcase 248:
INSERT INTO FLOAT8_TMP VALUES ('9223372036854775807'::float8);
--Testcase 249:
SELECT f1::int8 FROM FLOAT8_TMP;
--Testcase 250:
DELETE FROM FLOAT8_TMP;
--Testcase 251:
INSERT INTO FLOAT8_TMP VALUES ('-9223372036854775808.5'::float8);
--Testcase 252:
SELECT f1::int8 FROM FLOAT8_TMP;
--Testcase 253:
DELETE FROM FLOAT8_TMP;
--Testcase 254:
INSERT INTO FLOAT8_TMP VALUES ('-9223372036854780000'::float8);
--Testcase 255:
SELECT f1::int8 FROM FLOAT8_TMP;
-- test exact cases for trigonometric functions in degrees
-- The degree variants (sind, cosd, ...) are expected to return exact values
-- at the special angles; each query also asserts membership in the exact set.
-- The whole section runs in a rolled-back transaction so FLOAT8_TBL survives.
BEGIN;
--Testcase 95:
DELETE FROM FLOAT8_TBL;
--Testcase 96:
INSERT INTO FLOAT8_TBL VALUES (0), (30), (90), (150), (180),
      (210), (270), (330), (360);
--Testcase 97:
SELECT f1,
       sind(f1),
       sind(f1) IN (-1,-0.5,0,0.5,1) AS sind_exact
FROM FLOAT8_TBL;
--Testcase 98:
DELETE FROM FLOAT8_TBL;
--Testcase 99:
INSERT INTO FLOAT8_TBL VALUES (0), (60), (90), (120), (180),
      (240), (270), (300), (360);
--Testcase 100:
SELECT f1,
       cosd(f1),
       cosd(f1) IN (-1,-0.5,0,0.5,1) AS cosd_exact
FROM FLOAT8_TBL;
--Testcase 101:
DELETE FROM FLOAT8_TBL;
--Testcase 102:
INSERT INTO FLOAT8_TBL VALUES (0), (45), (90), (135), (180),
      (225), (270), (315), (360);
--Testcase 103:
SELECT f1,
       tand(f1),
       tand(f1) IN ('-Infinity'::float8,-1,0,
                    1,'Infinity'::float8) AS tand_exact,
       cotd(f1),
       cotd(f1) IN ('-Infinity'::float8,-1,0,
                    1,'Infinity'::float8) AS cotd_exact
FROM FLOAT8_TBL;
--Testcase 104:
DELETE FROM FLOAT8_TBL;
--Testcase 105:
INSERT INTO FLOAT8_TBL VALUES (-1), (-0.5), (0), (0.5), (1);
--Testcase 106:
SELECT f1,
       asind(f1),
       asind(f1) IN (-90,-30,0,30,90) AS asind_exact,
       acosd(f1),
       acosd(f1) IN (0,60,90,120,180) AS acosd_exact
FROM FLOAT8_TBL;
--Testcase 107:
DELETE FROM FLOAT8_TBL;
--Testcase 108:
INSERT INTO FLOAT8_TBL VALUES ('-Infinity'::float8), (-1), (0), (1),
      ('Infinity'::float8);
--Testcase 109:
SELECT f1,
       atand(f1),
       atand(f1) IN (-90,-45,0,45,90) AS atand_exact
FROM FLOAT8_TBL;
--Testcase 110:
DELETE FROM FLOAT8_TBL;
--Testcase 111:
INSERT INTO FLOAT8_TBL SELECT * FROM generate_series(0, 360, 90);
--Testcase 112:
SELECT x, y,
       atan2d(y, x),
       atan2d(y, x) IN (-90,0,90,180) AS atan2d_exact
FROM (SELECT 10*cosd(f1), 10*sind(f1)
      FROM FLOAT8_TBL) AS t(x,y);
ROLLBACK;
--
-- test output (and round-trip safety) of various values.
-- To ensure we're testing what we think we're testing, start with
-- float values specified by bit patterns (as a useful side effect,
-- this means we'll fail on non-IEEE platforms).
-- xfloat8 is a shell type that shares float8's 8-byte representation but
-- uses int8's text I/O, so exact bit patterns can be written as integers
-- and reinterpreted as doubles via binary-compatible casts.
--Testcase 256:
create type xfloat8;
--Testcase 257:
create function xfloat8in(cstring) returns xfloat8 immutable strict
  language internal as 'int8in';
--Testcase 258:
create function xfloat8out(xfloat8) returns cstring immutable strict
  language internal as 'int8out';
--Testcase 259:
create type xfloat8 (input = xfloat8in, output = xfloat8out, like = float8);
--Testcase 260:
create cast (xfloat8 as float8) without function;
--Testcase 261:
create cast (float8 as xfloat8) without function;
--Testcase 262:
create cast (xfloat8 as bigint) without function;
--Testcase 263:
create cast (bigint as xfloat8) without function;
-- float8: seeeeeee eeeeeeee eeeeeeee mmmmmmmm mmmmmmmm(x4)
-- we don't care to assume the platform's strtod() handles subnormals
-- correctly; those are "use at your own risk". However we do test
-- subnormal outputs, since those are under our control.
--Testcase 264:
create foreign table testdata(bits text, id int OPTIONS (key 'true')) server sqlite_svr;
begin;
--Testcase 265:
insert into testdata(bits) values
-- small subnormals
(x'0000000000000001'),
(x'0000000000000002'), (x'0000000000000003'),
(x'0000000000001000'), (x'0000000100000000'),
(x'0000010000000000'), (x'0000010100000000'),
(x'0000400000000000'), (x'0000400100000000'),
(x'0000800000000000'), (x'0000800000000001'),
-- these values taken from upstream testsuite
(x'00000000000f4240'),
(x'00000000016e3600'),
(x'0000008cdcdea440'),
-- borderline between subnormal and normal
(x'000ffffffffffff0'), (x'000ffffffffffff1'),
(x'000ffffffffffffe'), (x'000fffffffffffff');
--Testcase 266:
select float8send(flt) as ibits,
       flt
  from (select bits::bit(64)::bigint::xfloat8::float8 as flt
          from testdata
        offset 0) s;
rollback;
-- round-trip tests
-- Insert a large catalogue of exact IEEE-754 bit patterns, print each value,
-- convert it to text and back, and verify the bits are reproduced exactly
-- (the `correct` column). Runs in a rolled-back transaction.
begin;
--Testcase 267:
insert into testdata(bits) values
(x'0000000000000000'),
-- smallest normal values
(x'0010000000000000'), (x'0010000000000001'),
(x'0010000000000002'), (x'0018000000000000'),
--
(x'3ddb7cdfd9d7bdba'), (x'3ddb7cdfd9d7bdbb'), (x'3ddb7cdfd9d7bdbc'),
(x'3e112e0be826d694'), (x'3e112e0be826d695'), (x'3e112e0be826d696'),
(x'3e45798ee2308c39'), (x'3e45798ee2308c3a'), (x'3e45798ee2308c3b'),
(x'3e7ad7f29abcaf47'), (x'3e7ad7f29abcaf48'), (x'3e7ad7f29abcaf49'),
(x'3eb0c6f7a0b5ed8c'), (x'3eb0c6f7a0b5ed8d'), (x'3eb0c6f7a0b5ed8e'),
(x'3ee4f8b588e368ef'), (x'3ee4f8b588e368f0'), (x'3ee4f8b588e368f1'),
(x'3f1a36e2eb1c432c'), (x'3f1a36e2eb1c432d'), (x'3f1a36e2eb1c432e'),
(x'3f50624dd2f1a9fb'), (x'3f50624dd2f1a9fc'), (x'3f50624dd2f1a9fd'),
(x'3f847ae147ae147a'), (x'3f847ae147ae147b'), (x'3f847ae147ae147c'),
(x'3fb9999999999999'), (x'3fb999999999999a'), (x'3fb999999999999b'),
-- values very close to 1
(x'3feffffffffffff0'), (x'3feffffffffffff1'), (x'3feffffffffffff2'),
(x'3feffffffffffff3'), (x'3feffffffffffff4'), (x'3feffffffffffff5'),
(x'3feffffffffffff6'), (x'3feffffffffffff7'), (x'3feffffffffffff8'),
(x'3feffffffffffff9'), (x'3feffffffffffffa'), (x'3feffffffffffffb'),
(x'3feffffffffffffc'), (x'3feffffffffffffd'), (x'3feffffffffffffe'),
(x'3fefffffffffffff'),
(x'3ff0000000000000'),
(x'3ff0000000000001'), (x'3ff0000000000002'), (x'3ff0000000000003'),
(x'3ff0000000000004'), (x'3ff0000000000005'), (x'3ff0000000000006'),
(x'3ff0000000000007'), (x'3ff0000000000008'), (x'3ff0000000000009'),
--
(x'3ff921fb54442d18'),
(x'4005bf0a8b14576a'),
(x'400921fb54442d18'),
--
(x'4023ffffffffffff'), (x'4024000000000000'), (x'4024000000000001'),
(x'4058ffffffffffff'), (x'4059000000000000'), (x'4059000000000001'),
(x'408f3fffffffffff'), (x'408f400000000000'), (x'408f400000000001'),
(x'40c387ffffffffff'), (x'40c3880000000000'), (x'40c3880000000001'),
(x'40f869ffffffffff'), (x'40f86a0000000000'), (x'40f86a0000000001'),
(x'412e847fffffffff'), (x'412e848000000000'), (x'412e848000000001'),
(x'416312cfffffffff'), (x'416312d000000000'), (x'416312d000000001'),
(x'4197d783ffffffff'), (x'4197d78400000000'), (x'4197d78400000001'),
(x'41cdcd64ffffffff'), (x'41cdcd6500000000'), (x'41cdcd6500000001'),
(x'4202a05f1fffffff'), (x'4202a05f20000000'), (x'4202a05f20000001'),
(x'42374876e7ffffff'), (x'42374876e8000000'), (x'42374876e8000001'),
(x'426d1a94a1ffffff'), (x'426d1a94a2000000'), (x'426d1a94a2000001'),
(x'42a2309ce53fffff'), (x'42a2309ce5400000'), (x'42a2309ce5400001'),
(x'42d6bcc41e8fffff'), (x'42d6bcc41e900000'), (x'42d6bcc41e900001'),
(x'430c6bf52633ffff'), (x'430c6bf526340000'), (x'430c6bf526340001'),
(x'4341c37937e07fff'), (x'4341c37937e08000'), (x'4341c37937e08001'),
(x'4376345785d89fff'), (x'4376345785d8a000'), (x'4376345785d8a001'),
(x'43abc16d674ec7ff'), (x'43abc16d674ec800'), (x'43abc16d674ec801'),
(x'43e158e460913cff'), (x'43e158e460913d00'), (x'43e158e460913d01'),
(x'4415af1d78b58c3f'), (x'4415af1d78b58c40'), (x'4415af1d78b58c41'),
(x'444b1ae4d6e2ef4f'), (x'444b1ae4d6e2ef50'), (x'444b1ae4d6e2ef51'),
(x'4480f0cf064dd591'), (x'4480f0cf064dd592'), (x'4480f0cf064dd593'),
(x'44b52d02c7e14af5'), (x'44b52d02c7e14af6'), (x'44b52d02c7e14af7'),
(x'44ea784379d99db3'), (x'44ea784379d99db4'), (x'44ea784379d99db5'),
(x'45208b2a2c280290'), (x'45208b2a2c280291'), (x'45208b2a2c280292'),
--
(x'7feffffffffffffe'), (x'7fefffffffffffff'),
-- round to even tests (+ve)
(x'4350000000000002'),
(x'4350000000002e06'),
(x'4352000000000003'),
(x'4352000000000004'),
(x'4358000000000003'),
(x'4358000000000004'),
(x'435f000000000020'),
-- round to even tests (-ve)
(x'c350000000000002'),
(x'c350000000002e06'),
(x'c352000000000003'),
(x'c352000000000004'),
(x'c358000000000003'),
(x'c358000000000004'),
(x'c35f000000000020'),
-- exercise fixed-point memmoves
(x'42dc12218377de66'),
(x'42a674e79c5fe51f'),
(x'4271f71fb04cb74c'),
(x'423cbe991a145879'),
(x'4206fee0e1a9e061'),
(x'41d26580b487e6b4'),
(x'419d6f34540ca453'),
(x'41678c29dcd6e9dc'),
(x'4132d687e3df217d'),
(x'40fe240c9fcb68c8'),
(x'40c81cd6e63c53d3'),
(x'40934a4584fd0fdc'),
(x'405edd3c07fb4c93'),
(x'4028b0fcd32f7076'),
(x'3ff3c0ca428c59f8'),
-- these cases come from the upstream's testsuite
-- LotsOfTrailingZeros)
(x'3e60000000000000'),
-- Regression
(x'c352bd2668e077c4'),
(x'434018601510c000'),
(x'43d055dc36f24000'),
(x'43e052961c6f8000'),
(x'3ff3c0ca2a5b1d5d'),
-- LooksLikePow5
(x'4830f0cf064dd592'),
(x'4840f0cf064dd592'),
(x'4850f0cf064dd592'),
-- OutputLength
(x'3ff3333333333333'),
(x'3ff3ae147ae147ae'),
(x'3ff3be76c8b43958'),
(x'3ff3c083126e978d'),
(x'3ff3c0c1fc8f3238'),
(x'3ff3c0c9539b8887'),
(x'3ff3c0ca2a5b1d5d'),
(x'3ff3c0ca4283de1b'),
(x'3ff3c0ca43db770a'),
(x'3ff3c0ca428abd53'),
(x'3ff3c0ca428c1d2b'),
(x'3ff3c0ca428c51f2'),
(x'3ff3c0ca428c58fc'),
(x'3ff3c0ca428c59dd'),
(x'3ff3c0ca428c59f8'),
(x'3ff3c0ca428c59fb'),
-- 32-bit chunking
(x'40112e0be8047a7d'),
(x'40112e0be815a889'),
(x'40112e0be826d695'),
(x'40112e0be83804a1'),
(x'40112e0be84932ad'),
-- MinMaxShift
(x'0040000000000000'),
(x'007fffffffffffff'),
(x'0290000000000000'),
(x'029fffffffffffff'),
(x'4350000000000000'),
(x'435fffffffffffff'),
(x'1330000000000000'),
(x'133fffffffffffff'),
(x'3a6fa7161a4d6e0c');
--Testcase 268:
select float8send(flt) as ibits,
       flt,
       flt::text::float8 as r_flt,
       float8send(flt::text::float8) as obits,
       float8send(flt::text::float8) = float8send(flt) as correct
  from (select bits::bit(64)::bigint::xfloat8::float8 as flt
          from testdata
        offset 0) s;
rollback;
-- clean up, lest opr_sanity complain
--Testcase 269:
drop type xfloat8 cascade;
-- Clean up
-- Drop every remaining foreign table in the database, whatever schema it lives
-- in, so the server/extension below can be dropped without CASCADE surprises.
-- format('%I.%I', ...) quotes the identifiers safely.
DO $d$
declare
l_rec record;
begin
for l_rec in (select foreign_table_schema, foreign_table_name
from information_schema.foreign_tables) loop
execute format('drop foreign table %I.%I cascade;', l_rec.foreign_table_schema, l_rec.foreign_table_name);
end loop;
end;
$d$;
--Testcase 270:
-- Remove the foreign server; its foreign tables were dropped by the DO block above.
DROP SERVER sqlite_svr;
--Testcase 271:
-- Finally remove the FDW extension itself and any remaining dependent objects.
DROP EXTENSION duckdb_fdw CASCADE;
--
-- Table structure for table `bb_attach_quota`
--
-- Quota assignments per user or per group; quota_limit_id matches
-- bb_quota_limits.quota_limit_id defined later in this dump.
DROP TABLE IF EXISTS `bb_attach_quota`;
CREATE TABLE `bb_attach_quota` (
`user_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`group_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`quota_type` smallint(2) NOT NULL DEFAULT '0',
`quota_limit_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
KEY `quota_type` (`quota_type`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_attachments` ***
--
-- Link table: associates an attachment with a post and/or a private message.
DROP TABLE IF EXISTS `bb_attachments`;
CREATE TABLE `bb_attachments` (
`attach_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`post_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`privmsgs_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`user_id_1` mediumint(8) NOT NULL DEFAULT '0',
`user_id_2` mediumint(8) NOT NULL DEFAULT '0',
KEY `attach_id_post_id` (`attach_id`,`post_id`),
KEY `attach_id_privmsgs_id` (`attach_id`,`privmsgs_id`),
KEY `post_id` (`post_id`),
KEY `privmsgs_id` (`privmsgs_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_attachments_config`
--
-- Key/value settings for the attachment subsystem.
DROP TABLE IF EXISTS `bb_attachments_config`;
CREATE TABLE `bb_attachments_config` (
`config_name` varchar(255) NOT NULL DEFAULT '',
`config_value` varchar(255) NOT NULL DEFAULT '',
PRIMARY KEY (`config_name`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_attachments_desc` ***
--
-- Attachment metadata: on-disk vs. display file name, size/time, mimetype,
-- download counter, and per-attachment thanks/rating aggregates.
DROP TABLE IF EXISTS `bb_attachments_desc`;
CREATE TABLE `bb_attachments_desc` (
`attach_id` mediumint(8) unsigned NOT NULL AUTO_INCREMENT,
`physical_filename` varchar(255) NOT NULL DEFAULT '',
`real_filename` varchar(255) DEFAULT NULL,
`download_count` mediumint(8) unsigned NOT NULL DEFAULT '0',
`comment` varchar(255) DEFAULT NULL,
`extension` varchar(100) DEFAULT NULL,
`mimetype` varchar(100) DEFAULT NULL,
`filesize` int(20) NOT NULL DEFAULT '0',
`filetime` int(11) NOT NULL DEFAULT '0',
`thumbnail` tinyint(1) NOT NULL DEFAULT '0',
`tracker_status` tinyint(1) NOT NULL DEFAULT '0',
`thanks` mediumint(8) NOT NULL DEFAULT '0',
`rating_sum` int(11) NOT NULL DEFAULT '0',
`rating_count` mediumint(8) NOT NULL DEFAULT '0',
`force_private` tinyint(1) NOT NULL DEFAULT '0',
PRIMARY KEY (`attach_id`),
KEY `filetime` (`filetime`),
-- prefix index: only the first 10 characters of the name are indexed
KEY `physical_filename` (`physical_filename`(10)),
KEY `filesize` (`filesize`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_attachments_rating` ***
--
-- One row per (attachment, user): the user's rating and "thanked" flag.
DROP TABLE IF EXISTS `bb_attachments_rating`;
CREATE TABLE `bb_attachments_rating` (
`attach_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`user_id` mediumint(9) NOT NULL DEFAULT '0',
`thanked` tinyint(1) NOT NULL DEFAULT '0',
`rating` tinyint(1) NOT NULL DEFAULT '0',
PRIMARY KEY (`attach_id`,`user_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_auth_access` ***
--
-- Per-(group, forum) permission flags; one tinyint flag per action.
DROP TABLE IF EXISTS `bb_auth_access`;
CREATE TABLE `bb_auth_access` (
`group_id` mediumint(8) NOT NULL DEFAULT '0',
`forum_id` smallint(5) unsigned NOT NULL DEFAULT '0',
`auth_view` tinyint(1) NOT NULL DEFAULT '0',
`auth_read` tinyint(1) NOT NULL DEFAULT '0',
`auth_post` tinyint(1) NOT NULL DEFAULT '0',
`auth_reply` tinyint(1) NOT NULL DEFAULT '0',
`auth_edit` tinyint(1) NOT NULL DEFAULT '0',
`auth_delete` tinyint(1) NOT NULL DEFAULT '0',
`auth_sticky` tinyint(1) NOT NULL DEFAULT '0',
`auth_announce` tinyint(1) NOT NULL DEFAULT '0',
`auth_vote` tinyint(1) NOT NULL DEFAULT '0',
`auth_pollcreate` tinyint(1) NOT NULL DEFAULT '0',
`auth_attachments` tinyint(1) NOT NULL DEFAULT '0',
`auth_mod` tinyint(1) NOT NULL DEFAULT '0',
`auth_download` tinyint(1) NOT NULL DEFAULT '0',
KEY `group_id` (`group_id`),
KEY `forum_id` (`forum_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_banlist`
--
-- Bans by user id, IP (stored as 8 hex chars, phpBB convention — TODO confirm)
-- or email pattern.
DROP TABLE IF EXISTS `bb_banlist`;
CREATE TABLE `bb_banlist` (
`ban_id` mediumint(8) unsigned NOT NULL AUTO_INCREMENT,
`ban_userid` mediumint(8) NOT NULL DEFAULT '0',
`ban_ip` varchar(8) NOT NULL DEFAULT '',
`ban_email` varchar(255) DEFAULT NULL,
PRIMARY KEY (`ban_id`),
KEY `ban_ip_user_id` (`ban_ip`,`ban_userid`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_bt_config`
--
-- Key/value settings for the BitTorrent tracker subsystem.
DROP TABLE IF EXISTS `bb_bt_config`;
CREATE TABLE `bb_bt_config` (
`config_name` varchar(255) NOT NULL DEFAULT '',
`config_value` varchar(255) DEFAULT NULL,
PRIMARY KEY (`config_name`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_bt_search_results`
--
-- Cached tracker search results, one serialized result set per session.
DROP TABLE IF EXISTS `bb_bt_search_results`;
CREATE TABLE `bb_bt_search_results` (
`session_id` varchar(32) NOT NULL DEFAULT '',
`search_id` int(10) unsigned NOT NULL DEFAULT '0',
`added` int(11) NOT NULL DEFAULT '0',
`search_array` text NOT NULL,
`search_settings` text NOT NULL,
PRIMARY KEY (`session_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_bt_tor_dl_stat` ***
--
-- Per-(torrent, user) transfer totals; written often, hence DELAY_KEY_WRITE.
DROP TABLE IF EXISTS `bb_bt_tor_dl_stat`;
CREATE TABLE `bb_bt_tor_dl_stat` (
`torrent_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`user_id` mediumint(9) NOT NULL DEFAULT '0',
`attach_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`t_up_total` bigint(20) unsigned NOT NULL DEFAULT '0',
`t_down_total` bigint(20) unsigned NOT NULL DEFAULT '0',
`t_bonus_total` bigint(20) unsigned NOT NULL DEFAULT '0',
PRIMARY KEY (`torrent_id`,`user_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 DELAY_KEY_WRITE=1 ROW_FORMAT=DYNAMIC;
--
-- Table structure for table `bb_bt_torrents` ***
--
-- One row per registered torrent. post_id / topic_id / attach_id / info_hash
-- are all UNIQUE: a torrent maps 1:1 to its forum topic, post and attachment.
DROP TABLE IF EXISTS `bb_bt_torrents`;
CREATE TABLE `bb_bt_torrents` (
`torrent_id` mediumint(8) unsigned NOT NULL AUTO_INCREMENT,
`info_hash` varbinary(20) NOT NULL,
`post_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`poster_id` mediumint(9) NOT NULL DEFAULT '0',
`topic_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`attach_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`size` bigint(20) unsigned NOT NULL DEFAULT '0',
`piece_length` mediumint(8) unsigned NOT NULL DEFAULT '0',
`reg_time` int(11) NOT NULL DEFAULT '0',
`complete_count` mediumint(8) unsigned NOT NULL DEFAULT '0',
`seeder_last_seen` int(11) NOT NULL DEFAULT '0',
`last_seeder_uid` mediumint(9) NOT NULL DEFAULT '0',
`topic_check_status` tinyint(3) unsigned NOT NULL DEFAULT '0',
`topic_check_uid` mediumint(8) unsigned NOT NULL DEFAULT '0',
`topic_check_date` int(11) NOT NULL DEFAULT '0',
`topic_check_first_fid` mediumint(8) NOT NULL DEFAULT '0',
`topic_check_duble_tid` mediumint(8) NOT NULL DEFAULT '0',
`leechers` int(11) NOT NULL DEFAULT '0',
`seeders` int(11) NOT NULL DEFAULT '0',
`speed_ul` bigint(20) unsigned NOT NULL DEFAULT '0',
`speed_dl` bigint(20) unsigned NOT NULL DEFAULT '0',
PRIMARY KEY (`torrent_id`),
UNIQUE KEY `post_id` (`post_id`),
UNIQUE KEY `topic_id` (`topic_id`),
UNIQUE KEY `attach_id` (`attach_id`),
UNIQUE KEY `info_hash` (`info_hash`),
KEY `reg_time` (`reg_time`),
KEY `poster_id` (`poster_id`),
KEY `size` (`size`),
KEY `topic_check_uid` (`topic_check_uid`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_bt_torrents_del`
--
-- Deleted / ratio-adjusted torrents; dl_percent is what still counts toward
-- download stats (also written by the bb_topics_au trigger further below).
DROP TABLE IF EXISTS `bb_bt_torrents_del`;
CREATE TABLE `bb_bt_torrents_del` (
`info_hash` binary(20) NOT NULL DEFAULT '\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0',
`is_del` tinyint(4) NOT NULL DEFAULT '1',
`dl_percent` tinyint(4) NOT NULL DEFAULT '100',
`torrent_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
PRIMARY KEY (`torrent_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_bt_tracker`
--
-- Live peer list: one row per (torrent, peer). High write volume, so
-- MAX_ROWS/DELAY_KEY_WRITE are tuned; rows expire via expire_time.
DROP TABLE IF EXISTS `bb_bt_tracker`;
CREATE TABLE `bb_bt_tracker` (
`torrent_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`peer_id` varchar(35) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL,
`user_id` mediumint(9) NOT NULL DEFAULT '0',
`ip` char(8) NOT NULL DEFAULT '0',
`ipv6` varchar(32) DEFAULT NULL,
`port` smallint(5) unsigned NOT NULL DEFAULT '0',
`uploaded` bigint(20) unsigned NOT NULL DEFAULT '0',
`downloaded` bigint(20) unsigned NOT NULL DEFAULT '0',
`complete_percent` bigint(20) unsigned NOT NULL DEFAULT '0',
`seeder` tinyint(1) NOT NULL DEFAULT '0',
`last_stored_up` bigint(20) unsigned NOT NULL DEFAULT '0',
`last_stored_down` bigint(20) unsigned NOT NULL DEFAULT '0',
`stat_last_updated` int(11) NOT NULL DEFAULT '0',
`speed_up` mediumint(8) unsigned NOT NULL DEFAULT '0',
`speed_down` mediumint(8) unsigned NOT NULL DEFAULT '0',
`update_time` int(11) NOT NULL DEFAULT '0',
`expire_time` int(11) NOT NULL DEFAULT '0',
UNIQUE KEY `torrent_peer_id` (`torrent_id`,`peer_id`),
KEY `user_id` (`user_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 MAX_ROWS=1000000000 DELAY_KEY_WRITE=1 ROW_FORMAT=DYNAMIC;
--
-- Table structure for table `bb_bt_users` ***
--
-- Per-user tracker account: passkey (auth_key) plus lifetime / daily /
-- yesterday transfer and bonus totals. ROW_FORMAT=FIXED for in-place updates.
DROP TABLE IF EXISTS `bb_bt_users`;
CREATE TABLE `bb_bt_users` (
`user_id` mediumint(9) NOT NULL DEFAULT '0',
`auth_key` char(10) NOT NULL DEFAULT '',
`u_up_total` bigint(20) unsigned NOT NULL DEFAULT '0',
`u_bonus_total` bigint(20) unsigned NOT NULL DEFAULT '0',
`u_down_total` bigint(20) unsigned NOT NULL DEFAULT '0',
`u_bonus_today` bigint(20) NOT NULL DEFAULT '0',
`u_bonus_hourly` bigint(20) NOT NULL DEFAULT '0',
`u_down_today` bigint(20) NOT NULL DEFAULT '0',
`u_up_today` bigint(20) NOT NULL DEFAULT '0',
`u_bonus_yday` bigint(20) NOT NULL DEFAULT '0',
`u_down_yday` bigint(20) NOT NULL DEFAULT '0',
`u_up_yday` bigint(20) NOT NULL DEFAULT '0',
`max_up_speed` bigint(20) unsigned NOT NULL DEFAULT '0',
`max_down_speed` bigint(20) unsigned NOT NULL DEFAULT '0',
`ratio_nulled` tinyint(1) unsigned NOT NULL DEFAULT '0',
`u_up_old` bigint(20) NOT NULL,
`u_down_old` bigint(20) NOT NULL,
`u_bonus_old` bigint(20) NOT NULL,
`max_up_speed_old` bigint(20) NOT NULL,
`u_releases` mediumint(6) NOT NULL DEFAULT '0',
`can_leech` tinyint(1) NOT NULL DEFAULT '1',
PRIMARY KEY (`user_id`),
UNIQUE KEY `auth_key` (`auth_key`),
KEY `user_id_down` (`user_id`,`u_down_total`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 DELAY_KEY_WRITE=1 ROW_FORMAT=FIXED;
--
-- Table structure for table `bb_bt_users_dl_status` ***
--
-- Per-(topic, user) download status / completion counter for release topics.
DROP TABLE IF EXISTS `bb_bt_users_dl_status`;
CREATE TABLE `bb_bt_users_dl_status` (
`topic_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`user_id` mediumint(9) NOT NULL DEFAULT '0',
`user_status` tinyint(1) NOT NULL DEFAULT '0',
`compl_count` tinyint(1) unsigned NOT NULL DEFAULT '0',
`update_time` int(11) NOT NULL DEFAULT '0',
`drop_release` tinyint(4) NOT NULL DEFAULT '0',
PRIMARY KEY (`topic_id`,`user_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 DELAY_KEY_WRITE=1 ROW_FORMAT=FIXED;
--
-- Table structure for table `bb_categories` ***
--
-- Forum categories (top level of the board tree), ordered by cat_order.
DROP TABLE IF EXISTS `bb_categories`;
CREATE TABLE `bb_categories` (
`cat_id` mediumint(8) unsigned NOT NULL AUTO_INCREMENT,
`cat_title` varchar(100) DEFAULT NULL,
`cat_title_short` varchar(100) DEFAULT NULL,
`cat_title_hashtag` varchar(100) DEFAULT NULL,
`cat_url` varchar(100) DEFAULT NULL,
`cat_desc` varchar(100) DEFAULT NULL,
`cat_order` mediumint(8) unsigned NOT NULL DEFAULT '0',
PRIMARY KEY (`cat_id`),
KEY `cat_order` (`cat_order`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_config`
--
-- Global board key/value configuration.
DROP TABLE IF EXISTS `bb_config`;
CREATE TABLE `bb_config` (
`config_name` varchar(255) NOT NULL DEFAULT '',
`config_value` varchar(255) DEFAULT NULL,
PRIMARY KEY (`config_name`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_confirm`
--
-- Pending CAPTCHA confirmation codes, keyed by (session, confirm id).
DROP TABLE IF EXISTS `bb_confirm`;
CREATE TABLE `bb_confirm` (
`confirm_id` char(32) NOT NULL DEFAULT '',
`session_id` char(32) NOT NULL DEFAULT '',
`code` char(10) NOT NULL,
PRIMARY KEY (`session_id`,`confirm_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_disallow`
--
-- Username patterns that may not be registered.
DROP TABLE IF EXISTS `bb_disallow`;
CREATE TABLE `bb_disallow` (
`disallow_id` mediumint(8) unsigned NOT NULL AUTO_INCREMENT,
`disallow_username` varchar(25) NOT NULL DEFAULT '',
PRIMARY KEY (`disallow_id`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_dl_st_prevs_list`
--
-- Download-status privileges granted to users/groups until `prevs_until`
-- (unix time) — exact semantics of the pg/png/g columns unclear from here.
DROP TABLE IF EXISTS `bb_dl_st_prevs_list`;
CREATE TABLE `bb_dl_st_prevs_list` (
`prevs_id` mediumint(8) unsigned NOT NULL AUTO_INCREMENT,
`prevs_user_pg` mediumint(8) NOT NULL DEFAULT '0',
`prevs_user_png` mediumint(8) NOT NULL DEFAULT '0',
`prevs_group_g` mediumint(8) NOT NULL DEFAULT '0',
`prevs_until` int(11) NOT NULL DEFAULT '0',
PRIMARY KEY (`prevs_id`),
KEY `prevs_user_pg_user_png_group_g` (`prevs_user_pg`,`prevs_user_png`,`prevs_group_g`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_easymod`
--
-- EasyMod installer bookkeeping: one row per installed board modification.
-- Note: text columns are cp1251 while the table default charset is utf8.
DROP TABLE IF EXISTS `bb_easymod`;
CREATE TABLE `bb_easymod` (
`mod_id` mediumint(8) NOT NULL AUTO_INCREMENT,
`mod_title` varchar(255) CHARACTER SET cp1251 DEFAULT '',
`mod_file` varchar(255) CHARACTER SET cp1251 DEFAULT '',
`mod_version` varchar(15) CHARACTER SET cp1251 DEFAULT '',
`mod_author_handle` varchar(25) CHARACTER SET cp1251 DEFAULT '',
`mod_author_email` varchar(100) CHARACTER SET cp1251 DEFAULT '',
`mod_author_name` varchar(100) CHARACTER SET cp1251 DEFAULT '',
`mod_author_url` varchar(100) CHARACTER SET cp1251 DEFAULT '',
`mod_description` text CHARACTER SET cp1251,
`mod_process_date` int(11) DEFAULT '0',
`mod_phpBB_version` varchar(15) CHARACTER SET cp1251 DEFAULT '',
`mod_processed_themes` varchar(200) CHARACTER SET cp1251 DEFAULT '',
`mod_processed_langs` varchar(200) CHARACTER SET cp1251 DEFAULT '',
`mod_files_edited` mediumint(8) DEFAULT '0',
`mod_tables_added` mediumint(8) DEFAULT '0',
`mod_tables_altered` mediumint(8) DEFAULT '0',
`mod_rows_inserted` mediumint(8) DEFAULT '0',
PRIMARY KEY (`mod_id`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_easymod_processed_files`
--
-- Files touched by each EasyMod installation (mod_id -> bb_easymod).
DROP TABLE IF EXISTS `bb_easymod_processed_files`;
CREATE TABLE `bb_easymod_processed_files` (
`mod_processed_file` varchar(255) CHARACTER SET cp1251 NOT NULL DEFAULT '',
`mod_id` mediumint(8) NOT NULL DEFAULT '0',
KEY `mod_processed_file` (`mod_processed_file`),
KEY `mod_id` (`mod_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_extension_groups`
--
-- Groups of allowed upload extensions with per-group size limit and
-- download mode; forum_permissions stores a serialized forum list.
DROP TABLE IF EXISTS `bb_extension_groups`;
CREATE TABLE `bb_extension_groups` (
`group_id` mediumint(8) NOT NULL AUTO_INCREMENT,
`group_name` varchar(20) NOT NULL DEFAULT '',
`cat_id` tinyint(2) NOT NULL DEFAULT '0',
`allow_group` tinyint(1) NOT NULL DEFAULT '0',
`download_mode` tinyint(1) unsigned NOT NULL DEFAULT '1',
`upload_icon` varchar(100) DEFAULT '',
`max_filesize` int(20) NOT NULL DEFAULT '0',
`forum_permissions` varchar(255) NOT NULL DEFAULT '',
PRIMARY KEY (`group_id`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_extensions`
--
-- Individual file extensions belonging to an extension group.
DROP TABLE IF EXISTS `bb_extensions`;
CREATE TABLE `bb_extensions` (
`ext_id` mediumint(8) unsigned NOT NULL AUTO_INCREMENT,
`group_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`extension` varchar(100) NOT NULL DEFAULT '',
`comment` varchar(100) DEFAULT NULL,
PRIMARY KEY (`ext_id`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_flags`
--
-- Country (or similar) flag images selectable in user profiles.
DROP TABLE IF EXISTS `bb_flags`;
CREATE TABLE `bb_flags` (
`flag_id` int(10) NOT NULL AUTO_INCREMENT,
`flag_name` varchar(25) DEFAULT '',
`flag_image` varchar(25) DEFAULT '',
PRIMARY KEY (`flag_id`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_forbidden_extensions`
--
-- Extensions that may never be uploaded.
DROP TABLE IF EXISTS `bb_forbidden_extensions`;
CREATE TABLE `bb_forbidden_extensions` (
`ext_id` mediumint(8) unsigned NOT NULL AUTO_INCREMENT,
`extension` varchar(100) NOT NULL DEFAULT '',
PRIMARY KEY (`ext_id`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_forum_prune`
--
-- Auto-prune settings per forum: prune topics older than prune_days,
-- checked every prune_freq days.
DROP TABLE IF EXISTS `bb_forum_prune`;
CREATE TABLE `bb_forum_prune` (
`prune_id` mediumint(8) unsigned NOT NULL AUTO_INCREMENT,
`forum_id` smallint(5) unsigned NOT NULL DEFAULT '0',
`prune_days` smallint(5) unsigned NOT NULL DEFAULT '0',
`prune_freq` smallint(5) unsigned NOT NULL DEFAULT '0',
PRIMARY KEY (`prune_id`),
KEY `forum_id` (`forum_id`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_forums` ***
--
-- Forums: board tree node under a category (cat_id), with denormalized
-- post/topic counters, per-action default auth levels, tracker (dl_*)
-- settings and auto-move scheduling. forum_id is NOT auto-increment here.
DROP TABLE IF EXISTS `bb_forums`;
CREATE TABLE `bb_forums` (
`forum_id` smallint(5) unsigned NOT NULL DEFAULT '0',
`cat_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`forum_name` varchar(150) DEFAULT NULL,
`forum_desc` text,
`forum_status` tinyint(4) NOT NULL DEFAULT '0',
`forum_order` mediumint(8) unsigned NOT NULL DEFAULT '1',
`forum_posts` mediumint(8) unsigned NOT NULL DEFAULT '0',
`forum_topics` mediumint(8) unsigned NOT NULL DEFAULT '0',
`forum_last_post_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`prune_next` int(11) DEFAULT NULL,
`prune_enable` tinyint(1) NOT NULL DEFAULT '0',
`auth_view` tinyint(2) NOT NULL DEFAULT '0',
`auth_read` tinyint(2) NOT NULL DEFAULT '0',
`auth_post` tinyint(2) NOT NULL DEFAULT '0',
`auth_reply` tinyint(2) NOT NULL DEFAULT '0',
`auth_edit` tinyint(2) NOT NULL DEFAULT '0',
`auth_delete` tinyint(2) NOT NULL DEFAULT '0',
`auth_sticky` tinyint(2) NOT NULL DEFAULT '0',
`auth_announce` tinyint(2) NOT NULL DEFAULT '0',
`auth_vote` tinyint(2) NOT NULL DEFAULT '0',
`auth_pollcreate` tinyint(2) NOT NULL DEFAULT '0',
`auth_attachments` tinyint(2) NOT NULL DEFAULT '0',
`auth_download` tinyint(2) NOT NULL DEFAULT '0',
`allow_reg_tracker` tinyint(1) NOT NULL DEFAULT '0',
`allow_dl_topic` tinyint(1) NOT NULL DEFAULT '0',
`dl_type_default` tinyint(1) NOT NULL DEFAULT '0',
`self_moderated` tinyint(1) NOT NULL DEFAULT '0',
`last_dl_topics_synch` int(11) NOT NULL DEFAULT '0',
`show_dl_buttons` tinyint(1) NOT NULL DEFAULT '0',
`forum_parent` mediumint(9) NOT NULL DEFAULT '0',
`show_on_index` tinyint(1) NOT NULL DEFAULT '1',
`forum_display_sort` tinyint(1) NOT NULL DEFAULT '0',
`forum_display_order` tinyint(1) NOT NULL DEFAULT '0',
`move_next` int(11) unsigned NOT NULL DEFAULT '0',
`recycle_move_next` int(11) unsigned NOT NULL DEFAULT '0',
`move_enable` tinyint(1) unsigned NOT NULL DEFAULT '0',
PRIMARY KEY (`forum_id`),
KEY `forums_order` (`forum_order`),
KEY `cat_id` (`cat_id`),
KEY `forum_last_post_id` (`forum_last_post_id`),
KEY `forum_parent` (`forum_parent`),
KEY `auth_view` (`auth_view`),
KEY `auth_read` (`auth_read`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_groups` ***
--
-- User groups; group_single_user marks the hidden per-user "self" group.
DROP TABLE IF EXISTS `bb_groups`;
CREATE TABLE `bb_groups` (
`group_id` mediumint(8) NOT NULL AUTO_INCREMENT,
`group_type` tinyint(4) NOT NULL DEFAULT '1',
`group_name` varchar(40) DEFAULT NULL,
`group_description` varchar(255) DEFAULT NULL,
`group_moderator` mediumint(8) NOT NULL DEFAULT '0',
`group_single_user` tinyint(1) NOT NULL DEFAULT '1',
`group_order` mediumint(8) NOT NULL DEFAULT '0',
PRIMARY KEY (`group_id`),
KEY `group_single_user` (`group_single_user`),
KEY `group_name` (`group_name`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_posts` ***
--
-- Post headers (body text lives in bb_posts_text). parsed/cache_file_md5
-- support a rendered-post cache; dont_cache opts a post out of it.
DROP TABLE IF EXISTS `bb_posts`;
CREATE TABLE `bb_posts` (
`post_id` mediumint(8) unsigned NOT NULL AUTO_INCREMENT,
`topic_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`forum_id` smallint(5) unsigned NOT NULL DEFAULT '0',
`poster_id` mediumint(8) NOT NULL DEFAULT '0',
`post_time` int(11) NOT NULL DEFAULT '0',
`post_created` int(11) NOT NULL DEFAULT '0',
`poster_ip` varchar(32) NOT NULL DEFAULT '',
`post_username` varchar(25) DEFAULT NULL,
`enable_bbcode` tinyint(1) NOT NULL DEFAULT '1',
`enable_html` tinyint(1) NOT NULL DEFAULT '0',
`enable_smilies` tinyint(1) NOT NULL DEFAULT '1',
`enable_sig` tinyint(1) NOT NULL DEFAULT '1',
`post_edit_time` int(11) DEFAULT NULL,
`post_edit_count` smallint(5) unsigned NOT NULL DEFAULT '0',
`post_attachment` tinyint(1) NOT NULL DEFAULT '0',
`parsed` tinyint(1) NOT NULL DEFAULT '0',
`dont_cache` tinyint(1) NOT NULL DEFAULT '0',
`cache_file_md5` varchar(32) DEFAULT NULL,
PRIMARY KEY (`post_id`),
KEY `poster_id` (`poster_id`),
KEY `post_time` (`post_time`),
KEY `forum_id` (`forum_id`),
KEY `topic_time` (`topic_id`,`post_time`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_posts_edit` ***
--
-- Edit audit trail: who last edited a post, how many times, and when.
DROP TABLE IF EXISTS `bb_posts_edit`;
CREATE TABLE `bb_posts_edit` (
`post_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`user_id` mediumint(8) NOT NULL DEFAULT '0',
`post_edit_count` smallint(5) unsigned NOT NULL DEFAULT '0',
`post_edit_time` int(11) DEFAULT NULL,
KEY `post_id` (`post_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_posts_text` ***
--
-- Post bodies, split from bb_posts to keep the header table rows small.
DROP TABLE IF EXISTS `bb_posts_text`;
CREATE TABLE `bb_posts_text` (
`post_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`bbcode_uid` varchar(10) NOT NULL DEFAULT '',
`post_subject` varchar(120) DEFAULT NULL,
`post_text` mediumtext,
PRIMARY KEY (`post_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_privmsgs` ***
--
-- Private-message headers (body lives in bb_privmsgs_text).
DROP TABLE IF EXISTS `bb_privmsgs`;
CREATE TABLE `bb_privmsgs` (
`privmsgs_id` mediumint(8) unsigned NOT NULL AUTO_INCREMENT,
`privmsgs_type` tinyint(4) NOT NULL DEFAULT '0',
`privmsgs_subject` varchar(255) DEFAULT NULL,
`privmsgs_from_userid` mediumint(8) NOT NULL DEFAULT '0',
`privmsgs_to_userid` mediumint(8) NOT NULL DEFAULT '0',
`privmsgs_date` int(11) NOT NULL DEFAULT '0',
`privmsgs_ip` varchar(8) NOT NULL DEFAULT '',
`privmsgs_enable_bbcode` tinyint(1) NOT NULL DEFAULT '1',
`privmsgs_enable_html` tinyint(1) NOT NULL DEFAULT '0',
`privmsgs_enable_smilies` tinyint(1) NOT NULL DEFAULT '1',
`privmsgs_attach_sig` tinyint(1) NOT NULL DEFAULT '1',
`privmsgs_attachment` tinyint(1) NOT NULL DEFAULT '0',
PRIMARY KEY (`privmsgs_id`),
KEY `privmsgs_from_userid` (`privmsgs_from_userid`),
KEY `privmsgs_to_userid` (`privmsgs_to_userid`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_privmsgs_text` ***
--
-- Private-message bodies; privmsgs_text_id matches bb_privmsgs.privmsgs_id.
DROP TABLE IF EXISTS `bb_privmsgs_text`;
CREATE TABLE `bb_privmsgs_text` (
`privmsgs_text_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`privmsgs_bbcode_uid` varchar(10) NOT NULL DEFAULT '0',
`privmsgs_text` text,
PRIMARY KEY (`privmsgs_text_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_quota_limits`
--
-- Named quota sizes referenced from bb_attach_quota.quota_limit_id.
DROP TABLE IF EXISTS `bb_quota_limits`;
CREATE TABLE `bb_quota_limits` (
`quota_limit_id` mediumint(8) unsigned NOT NULL AUTO_INCREMENT,
`quota_desc` varchar(20) NOT NULL DEFAULT '',
`quota_limit` bigint(20) unsigned NOT NULL DEFAULT '0',
PRIMARY KEY (`quota_limit_id`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_ranks` ***
--
-- User ranks: either earned at rank_min posts or special (manually assigned).
DROP TABLE IF EXISTS `bb_ranks`;
CREATE TABLE `bb_ranks` (
`rank_id` smallint(5) unsigned NOT NULL AUTO_INCREMENT,
`rank_title` varchar(50) DEFAULT NULL,
`rank_min` mediumint(8) NOT NULL DEFAULT '0',
`rank_special` tinyint(1) DEFAULT '0',
`rank_image` varchar(255) DEFAULT NULL,
`rank_group` mediumint(8) NOT NULL,
PRIMARY KEY (`rank_id`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_search_results`
--
-- Cached forum search results, one serialized result set per search id.
DROP TABLE IF EXISTS `bb_search_results`;
CREATE TABLE `bb_search_results` (
`search_id` int(11) unsigned NOT NULL DEFAULT '0',
`session_id` varchar(32) NOT NULL DEFAULT '',
`search_array` mediumtext NOT NULL,
PRIMARY KEY (`search_id`),
KEY `session_id` (`session_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_search_wordlist`
--
-- Search index dictionary; word_common flags stop-words excluded from search.
DROP TABLE IF EXISTS `bb_search_wordlist`;
CREATE TABLE `bb_search_wordlist` (
`word_text` varchar(50) NOT NULL DEFAULT '',
`word_id` mediumint(8) unsigned NOT NULL AUTO_INCREMENT,
`word_common` tinyint(1) unsigned NOT NULL DEFAULT '0',
PRIMARY KEY (`word_text`),
KEY `word_id` (`word_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_search_wordmatch`
--
-- Inverted index: which words occur in which post (title_match = in subject).
DROP TABLE IF EXISTS `bb_search_wordmatch`;
CREATE TABLE `bb_search_wordmatch` (
`post_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`word_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`title_match` tinyint(1) NOT NULL DEFAULT '0',
KEY `post_id` (`post_id`),
KEY `word_id` (`word_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_sessions`
--
-- Active browser sessions; session_time is indexed for expiry sweeps.
DROP TABLE IF EXISTS `bb_sessions`;
CREATE TABLE `bb_sessions` (
`session_id` char(32) NOT NULL DEFAULT '',
`session_user_id` mediumint(8) NOT NULL DEFAULT '0',
`session_start` int(11) NOT NULL DEFAULT '0',
`session_time` int(11) NOT NULL DEFAULT '0',
`session_ip` char(32) NOT NULL DEFAULT '0',
`session_page` int(11) NOT NULL DEFAULT '0',
`session_logged_in` tinyint(1) NOT NULL DEFAULT '0',
`session_admin` tinyint(2) NOT NULL DEFAULT '0',
PRIMARY KEY (`session_id`),
KEY `session_user_id` (`session_user_id`),
KEY `session_id_ip_user_id` (`session_id`,`session_ip`,`session_user_id`),
KEY `session_time` (`session_time`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 ROW_FORMAT=DYNAMIC;
--
-- Table structure for table `bb_sessions_keys`
--
-- "Remember me" autologin keys, one per (key, user).
DROP TABLE IF EXISTS `bb_sessions_keys`;
CREATE TABLE `bb_sessions_keys` (
`key_id` varchar(32) NOT NULL DEFAULT '0',
`user_id` mediumint(8) NOT NULL DEFAULT '0',
`last_ip` varchar(8) NOT NULL DEFAULT '0',
`last_login` int(11) NOT NULL DEFAULT '0',
PRIMARY KEY (`key_id`,`user_id`),
KEY `last_login` (`last_login`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_smilies`
--
-- Smiley definitions: text code -> image URL + alt text.
DROP TABLE IF EXISTS `bb_smilies`;
CREATE TABLE `bb_smilies` (
`smilies_id` smallint(5) unsigned NOT NULL AUTO_INCREMENT,
`code` varchar(50) DEFAULT NULL,
`smile_url` varchar(100) DEFAULT NULL,
`emoticon` varchar(75) DEFAULT NULL,
PRIMARY KEY (`smilies_id`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_themes`
--
-- Theme/style definitions: one row per installed style, holding the full
-- set of colors, CSS class names, fonts and sizes used by the templates.
DROP TABLE IF EXISTS `bb_themes`;
CREATE TABLE `bb_themes` (
`themes_id` mediumint(8) unsigned NOT NULL AUTO_INCREMENT,
`template_name` varchar(30) NOT NULL DEFAULT '',
`style_name` varchar(30) NOT NULL DEFAULT '',
`head_stylesheet` varchar(100) DEFAULT NULL,
`body_background` varchar(100) DEFAULT NULL,
`body_bgcolor` varchar(6) DEFAULT NULL,
`body_text` varchar(6) DEFAULT NULL,
`body_link` varchar(6) DEFAULT NULL,
`body_vlink` varchar(6) DEFAULT NULL,
`body_alink` varchar(6) DEFAULT NULL,
`body_hlink` varchar(6) DEFAULT NULL,
`tr_color1` varchar(6) DEFAULT NULL,
`tr_color2` varchar(6) DEFAULT NULL,
`tr_color3` varchar(6) DEFAULT NULL,
`tr_class1` varchar(25) DEFAULT NULL,
`tr_class2` varchar(25) DEFAULT NULL,
`tr_class3` varchar(25) DEFAULT NULL,
`th_color1` varchar(6) DEFAULT NULL,
`th_color2` varchar(6) DEFAULT NULL,
`th_color3` varchar(6) DEFAULT NULL,
`th_class1` varchar(25) DEFAULT NULL,
`th_class2` varchar(25) DEFAULT NULL,
`th_class3` varchar(25) DEFAULT NULL,
`td_color1` varchar(6) DEFAULT NULL,
`td_color2` varchar(6) DEFAULT NULL,
`td_color3` varchar(6) DEFAULT NULL,
`td_class1` varchar(25) DEFAULT NULL,
`td_class2` varchar(25) DEFAULT NULL,
`td_class3` varchar(25) DEFAULT NULL,
`fontface1` varchar(50) DEFAULT NULL,
`fontface2` varchar(50) DEFAULT NULL,
`fontface3` varchar(50) DEFAULT NULL,
`fontsize1` tinyint(4) DEFAULT NULL,
`fontsize2` tinyint(4) DEFAULT NULL,
`fontsize3` tinyint(4) DEFAULT NULL,
`fontcolor1` varchar(6) DEFAULT NULL,
`fontcolor2` varchar(6) DEFAULT NULL,
`fontcolor3` varchar(6) DEFAULT NULL,
`span_class1` varchar(25) DEFAULT NULL,
`span_class2` varchar(25) DEFAULT NULL,
`span_class3` varchar(25) DEFAULT NULL,
`img_size_poll` smallint(5) unsigned DEFAULT NULL,
`img_size_privmsg` smallint(5) unsigned DEFAULT NULL,
PRIMARY KEY (`themes_id`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_themes_name`
--
-- Human-readable labels for each bb_themes column, shown in the admin style
-- editor; one row per theme, mirroring bb_themes field-for-field.
DROP TABLE IF EXISTS `bb_themes_name`;
CREATE TABLE `bb_themes_name` (
`themes_id` smallint(5) unsigned NOT NULL DEFAULT '0',
`tr_color1_name` char(50) DEFAULT NULL,
`tr_color2_name` char(50) DEFAULT NULL,
`tr_color3_name` char(50) DEFAULT NULL,
`tr_class1_name` char(50) DEFAULT NULL,
`tr_class2_name` char(50) DEFAULT NULL,
`tr_class3_name` char(50) DEFAULT NULL,
`th_color1_name` char(50) DEFAULT NULL,
`th_color2_name` char(50) DEFAULT NULL,
`th_color3_name` char(50) DEFAULT NULL,
`th_class1_name` char(50) DEFAULT NULL,
`th_class2_name` char(50) DEFAULT NULL,
`th_class3_name` char(50) DEFAULT NULL,
`td_color1_name` char(50) DEFAULT NULL,
`td_color2_name` char(50) DEFAULT NULL,
`td_color3_name` char(50) DEFAULT NULL,
`td_class1_name` char(50) DEFAULT NULL,
`td_class2_name` char(50) DEFAULT NULL,
`td_class3_name` char(50) DEFAULT NULL,
`fontface1_name` char(50) DEFAULT NULL,
`fontface2_name` char(50) DEFAULT NULL,
`fontface3_name` char(50) DEFAULT NULL,
`fontsize1_name` char(50) DEFAULT NULL,
`fontsize2_name` char(50) DEFAULT NULL,
`fontsize3_name` char(50) DEFAULT NULL,
`fontcolor1_name` char(50) DEFAULT NULL,
`fontcolor2_name` char(50) DEFAULT NULL,
`fontcolor3_name` char(50) DEFAULT NULL,
`span_class1_name` char(50) DEFAULT NULL,
`span_class2_name` char(50) DEFAULT NULL,
`span_class3_name` char(50) DEFAULT NULL,
PRIMARY KEY (`themes_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_topics` ***
--
-- Topics. Note this table is InnoDB (unlike the rest of the dump, which is
-- MyISAM); the AFTER UPDATE trigger below depends on it.
DROP TABLE IF EXISTS `bb_topics`;
CREATE TABLE `bb_topics` (
`topic_id` mediumint(8) unsigned NOT NULL AUTO_INCREMENT,
`forum_id` smallint(8) unsigned NOT NULL DEFAULT '0',
`topic_title` varchar(255) DEFAULT NULL,
`topic_poster` mediumint(8) NOT NULL DEFAULT '0',
`topic_time` int(11) NOT NULL DEFAULT '0',
`topic_views` mediumint(8) unsigned NOT NULL DEFAULT '0',
`topic_replies` mediumint(8) unsigned NOT NULL DEFAULT '0',
`topic_status` tinyint(3) NOT NULL DEFAULT '0',
`topic_vote` tinyint(1) NOT NULL DEFAULT '0',
`topic_type` tinyint(3) NOT NULL DEFAULT '0',
`topic_first_post_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`topic_last_post_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`topic_moved_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`topic_attachment` tinyint(1) NOT NULL DEFAULT '0',
`topic_dl_type` tinyint(1) NOT NULL DEFAULT '0',
`topic_dl_status` tinyint(1) NOT NULL DEFAULT '0',
`topic_type_gold` tinyint(3) unsigned NOT NULL DEFAULT '0',
`topic_show_first_post` tinyint(1) unsigned NOT NULL DEFAULT '1',
`call_seed_time` int(11) DEFAULT '0',
`topic_cache_lock` tinyint(1) NOT NULL DEFAULT '0',
PRIMARY KEY (`topic_id`),
UNIQUE KEY `topic_last_post_id` (`topic_last_post_id`) USING BTREE,
KEY `forum_id` (`forum_id`),
KEY `topic_type` (`topic_type`),
KEY `topic_status` (`topic_status`),
KEY `topic_moved_id` (`topic_moved_id`),
KEY `topic_poster` (`topic_poster`),
KEY `topic_cache_lock` (`topic_cache_lock`)
) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=utf8;
-- When a topic's gold/bonus status (topic_type_gold) changes, upsert a row in
-- bb_bt_torrents_del for the topic's torrent with is_del=0 and a dl_percent
-- derived from the new status (1 -> 0, 2 -> 50, 3 -> 75, 4 -> -1, else 100);
-- on conflict only dl_percent is refreshed.
DROP TRIGGER IF EXISTS `bb_topics_au`;
DELIMITER $$
CREATE TRIGGER `bb_topics_au` AFTER UPDATE ON `bb_topics`
FOR EACH ROW if new.topic_type_gold != old.topic_type_gold then
insert into bb_bt_torrents_del (torrent_id,info_hash,is_del,dl_percent)
select torrent_id,info_hash,0
,case new.topic_type_gold when 1 then 0 when 2 then 50 when 3 then 75 when 4 then -1 else 100 end
from bb_bt_torrents t where t.topic_id=new.topic_id
ON DUPLICATE KEY UPDATE dl_percent=values(dl_percent);
end if
$$
DELIMITER ;
--
-- Table structure for table `bb_topics_move`
--
-- Per-forum rules for auto-moving stale topics (and recycling them) after a
-- number of waiting days, checked at the given frequency.
DROP TABLE IF EXISTS `bb_topics_move`;
CREATE TABLE `bb_topics_move` (
`id` mediumint(8) unsigned NOT NULL AUTO_INCREMENT,
`forum_id` smallint(5) unsigned NOT NULL DEFAULT '0',
`waits_days` smallint(5) unsigned NOT NULL DEFAULT '0',
`check_freq` smallint(5) unsigned NOT NULL DEFAULT '0',
`move_fid` smallint(5) unsigned NOT NULL DEFAULT '0',
`recycle_waits_days` smallint(5) unsigned NOT NULL DEFAULT '0',
`recycle_check_freq` smallint(5) unsigned NOT NULL DEFAULT '0',
`recycle_move_fid` smallint(5) unsigned NOT NULL DEFAULT '0',
PRIMARY KEY (`id`),
KEY `forum_id` (`forum_id`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_topics_watch` ***
--
-- Topic subscription list: which users get notified about which topics.
DROP TABLE IF EXISTS `bb_topics_watch`;
CREATE TABLE `bb_topics_watch` (
`topic_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`user_id` mediumint(8) NOT NULL DEFAULT '0',
`notify_status` tinyint(1) NOT NULL DEFAULT '0',
PRIMARY KEY (`user_id`,`topic_id`),
KEY `topic_id` (`topic_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_user_group` ***
--
-- Group membership junction table (user <-> group), with pending/moderator flags.
DROP TABLE IF EXISTS `bb_user_group`;
CREATE TABLE `bb_user_group` (
`group_id` mediumint(8) NOT NULL DEFAULT '0',
`user_id` mediumint(8) NOT NULL DEFAULT '0',
`user_pending` tinyint(1) DEFAULT NULL,
`group_moderator` tinyint(1) NOT NULL,
KEY `group_id` (`group_id`),
KEY `user_id` (`user_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_users` ***
--
-- Main user account table: credentials, session bookkeeping, per-user display
-- preferences, contact fields, and tracker (bt_*) privacy flags.
-- NOTE(review): many time fields (user_regdate, user_lastvisit, ...) are raw
-- int(11) — presumably UNIX epoch seconds; confirm against application code.
DROP TABLE IF EXISTS `bb_users`;
CREATE TABLE `bb_users` (
`user_id` mediumint(8) NOT NULL DEFAULT '0',
`user_active` tinyint(1) DEFAULT '1',
`username` varchar(25) DEFAULT NULL,
`user_password2` varchar(60) NOT NULL DEFAULT '',
`user_session_time` int(11) NOT NULL DEFAULT '0',
`user_timer` int(11) NOT NULL,
`user_session_page` smallint(5) NOT NULL DEFAULT '0',
`user_lastvisit` int(11) NOT NULL DEFAULT '0',
`user_regdate` int(11) NOT NULL DEFAULT '0',
`user_level` tinyint(4) DEFAULT '0',
`user_posts` mediumint(8) unsigned NOT NULL DEFAULT '0',
`user_timezone` decimal(5,2) NOT NULL DEFAULT '0.00',
`user_style` tinyint(4) DEFAULT NULL,
`user_lang` varchar(255) DEFAULT NULL,
`user_dateformat` varchar(14) NOT NULL DEFAULT 'd M Y H:i',
`user_new_privmsg` smallint(5) unsigned NOT NULL DEFAULT '0',
`user_unread_privmsg` smallint(5) unsigned NOT NULL DEFAULT '0',
`user_last_privmsg` int(11) NOT NULL DEFAULT '0',
`user_login_tries` smallint(5) NOT NULL DEFAULT '0',
`user_last_login_try` int(11) NOT NULL DEFAULT '0',
`user_emailtime` int(11) DEFAULT NULL,
`user_viewemail` tinyint(1) DEFAULT NULL,
`user_attachsig` tinyint(1) DEFAULT NULL,
`user_allowhtml` tinyint(1) DEFAULT '1',
`user_allowbbcode` tinyint(1) DEFAULT '1',
`user_allowsmile` tinyint(1) DEFAULT '1',
`user_allowavatar` tinyint(1) NOT NULL DEFAULT '1',
`user_allow_pm` tinyint(1) NOT NULL DEFAULT '1',
`user_allow_viewonline` tinyint(1) NOT NULL DEFAULT '1',
`user_notify` tinyint(1) NOT NULL DEFAULT '1',
`user_notify_pm` tinyint(1) NOT NULL DEFAULT '0',
`user_popup_pm` tinyint(1) NOT NULL DEFAULT '0',
`user_rank` int(11) DEFAULT '0',
`user_avatar` varchar(100) DEFAULT NULL,
`user_avatar_type` tinyint(4) NOT NULL DEFAULT '0',
`user_email` varchar(255) DEFAULT NULL,
`user_icq` varchar(15) DEFAULT NULL,
`user_website` varchar(100) DEFAULT NULL,
`user_from` varchar(100) DEFAULT NULL,
`user_sig` text,
`user_sig_bbcode_uid` varchar(10) DEFAULT NULL,
`user_aim` varchar(255) DEFAULT NULL,
`user_yim` varchar(255) DEFAULT NULL,
`user_msnm` varchar(255) DEFAULT NULL,
`user_occ` varchar(100) DEFAULT NULL,
`user_interests` varchar(255) DEFAULT NULL,
`user_actkey` varchar(32) DEFAULT NULL,
`user_newpasswd` varchar(60) DEFAULT NULL,
`user_allow_passkey` tinyint(1) NOT NULL DEFAULT '1',
`user_from_flag` varchar(25) DEFAULT NULL,
`user_allowdefaultavatar` tinyint(1) NOT NULL DEFAULT '1',
`user_skype` varchar(255) DEFAULT NULL,
`user_warnings` tinyint(1) NOT NULL DEFAULT '0',
`user_banned` tinyint(1) NOT NULL DEFAULT '0',
`bt_tor_browse_set` text,
`user_unread_topics` text,
`user_hide_bt_stats` tinyint(1) NOT NULL DEFAULT '0',
`user_hide_bt_history` tinyint(1) NOT NULL DEFAULT '1',
`user_hide_bt_activity` tinyint(1) NOT NULL DEFAULT '0',
`user_hide_bt_topics` tinyint(1) NOT NULL DEFAULT '0',
`user_bt_ssl` tinyint(1) NOT NULL DEFAULT '0',
PRIMARY KEY (`user_id`),
KEY `user_level` (`user_level`),
KEY `user_session_time` (`user_session_time`),
KEY `user_regdate` (`user_regdate`),
KEY `username` (`username`),
KEY `user_email` (`user_email`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_vote_desc` ***
--
-- Poll definition attached to a topic: question text, duration, vote limits.
DROP TABLE IF EXISTS `bb_vote_desc`;
CREATE TABLE `bb_vote_desc` (
`vote_id` mediumint(8) unsigned NOT NULL AUTO_INCREMENT,
`topic_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`vote_text` text,
`vote_start` int(11) NOT NULL DEFAULT '0',
`vote_length` int(11) NOT NULL DEFAULT '0',
`vote_max` int(3) NOT NULL DEFAULT '1',
`vote_voted` int(7) NOT NULL DEFAULT '0',
`vote_hide` tinyint(1) NOT NULL DEFAULT '0',
`vote_tothide` tinyint(1) NOT NULL DEFAULT '0',
PRIMARY KEY (`vote_id`),
KEY `topic_id` (`topic_id`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_vote_results` ***
--
-- One row per poll option, with the running tally in vote_result.
DROP TABLE IF EXISTS `bb_vote_results`;
CREATE TABLE `bb_vote_results` (
`vote_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`vote_option_id` tinyint(4) unsigned NOT NULL DEFAULT '0',
`vote_option_text` varchar(255) DEFAULT NULL,
`vote_result` int(11) NOT NULL DEFAULT '0',
KEY `vote_option_id` (`vote_option_id`),
KEY `vote_id` (`vote_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_vote_voters` ***
--
-- Audit of who voted: user id plus IP / X-Forwarded-For to deter ballot stuffing.
DROP TABLE IF EXISTS `bb_vote_voters`;
CREATE TABLE `bb_vote_voters` (
`vote_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`vote_user_id` mediumint(8) NOT NULL DEFAULT '0',
`vote_user_ip` char(32) NOT NULL DEFAULT '',
`vote_user_result` varchar(50) DEFAULT NULL,
`vote_x_forwarded_for` varchar(50) DEFAULT NULL,
`vote_timestamp` int(11) NOT NULL,
KEY `vote_id` (`vote_id`),
KEY `vote_user_id` (`vote_user_id`),
KEY `vote_user_ip` (`vote_user_ip`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_warnings` ***
--
-- Moderator warnings issued against users/posts, with expiry timestamps.
DROP TABLE IF EXISTS `bb_warnings`;
CREATE TABLE `bb_warnings` (
`warning_id` mediumint(8) unsigned NOT NULL AUTO_INCREMENT,
`warning_type` tinyint(1) unsigned NOT NULL DEFAULT '0',
`warning_post_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`warning_user_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`warning_poster_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`warning_posted` int(11) NOT NULL DEFAULT '0',
`warning_expires` int(11) NOT NULL DEFAULT '0',
`warning_proceed` tinyint(1) NOT NULL DEFAULT '0',
PRIMARY KEY (`warning_id`),
KEY `warning_user_id` (`warning_user_id`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_warnings_text` ***
--
-- Body text of each warning, split off 1:1 from bb_warnings.
DROP TABLE IF EXISTS `bb_warnings_text`;
CREATE TABLE `bb_warnings_text` (
`warning_id` mediumint(8) unsigned NOT NULL DEFAULT '0',
`bbcode_uid` varchar(10) CHARACTER SET latin1 NOT NULL DEFAULT '',
`warning_text` text,
PRIMARY KEY (`warning_id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;
--
-- Table structure for table `bb_words`
--
-- Word-censor list: `word` patterns replaced by `replacement` in rendered posts.
DROP TABLE IF EXISTS `bb_words`;
CREATE TABLE `bb_words` (
`word_id` mediumint(8) unsigned NOT NULL AUTO_INCREMENT,
`word` char(100) DEFAULT NULL,
`replacement` char(100) NOT NULL DEFAULT '',
PRIMARY KEY (`word_id`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=utf8;
set enable_global_stats = true;
/*
 * This file is used to test the function of ExecVecAggregation()
 */
----
--- Create Table and Insert Data
----
create schema vector_agg_engine;
set current_schema=vector_agg_engine;
-- fix: "set time zone prc" was accidentally duplicated; issue it once.
set time zone prc;
set datestyle to iso;
-- Row-store source table covering every scalar type the vector agg tests
-- exercise; loaded first, then copied into the orc twin below.
create table vector_agg_engine.ROW_AGG_TABLE_01
(
   col_smallint smallint null
  ,col_integer	integer default 23423
  ,col_bigint	bigint default 923423432
  ,col_oid	Oid
  ,col_real	real
  ,col_numeric	numeric(18,12) null
  ,col_numeric2 numeric null
  ,col_double_precision	double precision
  ,col_decimal	decimal(19) default 923423423
  ,col_char	char(57) null
  ,col_char2	char default '0'
  ,col_varchar	varchar(19)
  ,col_text	text null
  ,col_varchar2	varchar2(20)
  ,col_time_without_time_zone	time without time zone null
  ,col_time_with_time_zone	time with time zone
  ,col_timestamp_without_timezone	timestamp
  ,col_timestamp_with_timezone	timestamptz
  ,col_smalldatetime	smalldatetime
  ,col_money	money
  ,col_date	date
)
distribute by hash(col_integer);
-- Column-store (orc on HDFS) twin of ROW_AGG_TABLE_01 — the actual table the
-- vectorized aggregation paths run against.
-- NOTE(review): col_varchar2 is varchar2(25) here vs varchar2(20) in the row
-- table — presumably intentional widening; confirm.
create table vector_agg_engine.VECTOR_AGG_TABLE_01
(
   col_smallint smallint null
  ,col_integer	integer default 23423
  ,col_bigint	bigint default 923423432
  ,col_oid	Oid
  ,col_real	real
  ,col_numeric	numeric(18,12) null
  ,col_numeric2 numeric null
  ,col_double_precision	double precision
  ,col_decimal	decimal(19) default 923423423
  ,col_char	char(57) null
  ,col_char2	char default '0'
  ,col_varchar	varchar(19)
  ,col_text	text null
  ,col_varchar2	varchar2(25)
  ,col_time_without_time_zone	time without time zone null
  ,col_time_with_time_zone	time with time zone
  ,col_timestamp_without_timezone	timestamp
  ,col_timestamp_with_timezone	timestamptz
  ,col_smalldatetime	smalldatetime
  ,col_money	money
  ,col_date	date
) with (orientation = orc)
tablespace hdfs_ts
distribute by hash(col_integer);
-- Populate row_agg_table_01 with 6000 rows split into three value bands
-- (i<2000, 2000..2487, >=2488) so group-by keys get skewed cardinalities.
CREATE OR REPLACE PROCEDURE func_insert_tbl_agg_01()
AS
BEGIN
	FOR I IN 1..6000 LOOP
		if I < 2000 then
			INSERT INTO vector_agg_engine.row_agg_table_01 VALUES(1, i, 111111, 23, i + 10.001, 561.322815379585 + i, 20857435000485996218310931968699256001981133222933850071857221402.725428507532497489821939560364033880 + i * 2,8885.169 - i * 0.125,  61032419811910588 + i, 'test_agg_'||i, 'F', 'vector_agg_'||i, '597b5b23f4aadf9513306bcd59afb6e4c9_'||i, 'beijing_agg'||i, '08:20:12', '06:26:42+08', 'Mon Feb 10 17:32:01.4 1997 PST', '1971-03-23 11:14:05', '1997-03-03 11:04', 56 + i, '2005-02-14');
		elsif i > 1999 AND i < 2488 then
			INSERT INTO vector_agg_engine.row_agg_table_01 VALUES(1, i, 121111, 45, i + 9.08, 27.25684426652 + i * 0.001, 20857434798277339938397404472048722532796412222119506033298219314.596941867590737379779439339277062225 + i, 8885.169 - i * 0.125,  61032419811910588 + i, 'test_agg_'||i, 'T', 'vector_agg_'||i, '597b5b23f4aadf9513306bcd59afb6e4c9_'||i, 'beijing_agg'||i, '14:21:56', '15:12:22+08', 'Mon Feb 10 17:32:01.4 1997 PST', '1971-03-23 11:14:05', '1996-06-12 03:06', 56 + i, '2008-02-14');
		else
			INSERT INTO vector_agg_engine.row_agg_table_01 VALUES(1, i, 111131, 2345, i + 2.047, 39.2456977995 + i * 0.3, 20857434796839002905636223150710041116810786801730952028511523795.100678976382813790191855282491921088 + i * 1.5, 8885.169 - i * 0.125,  61032419811910588 + i, 'test_agg_'||i, 'F', 'vector_agg_'||i, '597b5b23f4aadf9513306bcd59afb6e4c9_'||i, 'beijing_agg'||i, '19:07:24', '22:32:36+08', 'Mon Feb 10 17:32:01.4 1997 PST', '1971-03-23 11:14:05', '1992-02-06 03:08', 56 + i, '2015-02-14');
		end if;
	END LOOP;
END;
/
CALL func_insert_tbl_agg_01();
-- Mirror the generated rows into the column-store table under test.
insert into vector_agg_table_01 select * from row_agg_table_01;
-- Small orc table used as the join partner in mixed row/column join+agg tests;
-- seeded inline via COPY (data block below must stay verbatim).
create table vector_agg_engine.VECTOR_AGG_TABLE_02
( col_int	int
 ,col_num	numeric
 ,col_bint	bigint
)with(orientation = orc)
tablespace hdfs_ts
distribute by hash(col_int);
COPY VECTOR_AGG_TABLE_02(col_int, col_num, col_bint) FROM stdin;
1	1.2	111111
4	2.1	111111
8	3.6	121111
2	2.4	111131
9	4.2	111111
3	1.6	111131
5	21.7	121111
3	5.6	111111
\.
-- Row-store source for the partitioned tests: range-partitioned on the
-- composite key (col_numeric, col_date, col_integer) across 10 partitions.
create table vector_agg_engine.ROW_AGG_TABLE_03
(
   col_smallint smallint null
  ,col_integer	integer default 23423
  ,col_bigint	bigint default 923423432
  ,col_real	real
  ,col_numeric	numeric(18,12) null
  ,col_serial	bigint
  ,col_double_precision	double precision
  ,col_decimal	decimal(19) default 923423423
  ,col_char	char(57) null
  ,col_char2	char default '0'
  ,col_varchar	varchar(19)
  ,col_text	text null
  ,col_varchar2	varchar2(25)
  ,col_time_without_time_zone	time without time zone null
  ,col_time_with_time_zone	time with time zone
  ,col_timestamp_without_timezone	timestamp
  ,col_timestamp_with_timezone	timestamptz
  ,col_smalldatetime	smalldatetime
  ,col_money	money
  ,col_date	date
)
distribute by hash(col_integer)
partition by range(col_numeric, col_date, col_integer)(
partition partition_p1 values less than(3,'2002-02-04 00:01:00',20),
partition partition_p2 values less than(21,'2005-03-26 00:00:00',1061) ,
partition partition_p3 values less than(121,'2005-06-01 20:00:00',1600),
partition partition_p4 values less than(121,'2006-08-01 20:00:00',1987) ,
partition partition_p5 values less than(1456,'2007-12-03 10:00:00',2567),
partition partition_p6 values less than(2678,'2008-02-03 11:01:34',2800),
partition partition_p7 values less than(3601,'2008-02-13 01:01:34',3801),
partition partition_p8 values less than(3601,'2012-04-18 23:01:44',4560),
partition partition_p9 values less than(4500,'2012-06-18 23:01:44',4900),
partition partition_p10 values less than(9845,'2016-06-28 23:21:44',6200)) ;
-- Populate row_agg_table_03 with 1000 rows in three bands (i<200, 200..247,
-- >=248) whose values land in different range partitions.
CREATE OR REPLACE PROCEDURE func_insert_tbl_agg_03()
AS
BEGIN
	FOR I IN 1..1000 LOOP
		if I < 200 then
			INSERT INTO vector_agg_engine.row_agg_table_03 VALUES(1, i, 111111, i + 10.001, 561.322815379585 + i, 1112 + i * 3, 8885.169 - i * 0.125,  61032419811910588 + i, 'test_agg_'||i, 'F', 'vector_agg_'||i, '597b5b23f4aadf9513306bcd59afb6e4c9_'||i, 'beijing_agg'||i, '08:20:12', '06:26:42+08', 'Mon Feb 10 17:32:01.4 1997 PST', '1971-03-23 11:14:05', '1997-02-02 03:04', 56 + i, '2005-02-14');
		elsif i > 199 AND i < 248 then
			INSERT INTO vector_agg_engine.row_agg_table_03 VALUES(4, i + 2000, 121111, i + 2009.08, 27.25684426652 + i * 0.001, 25467 + i * 2, 8885.169 - i * 0.125,  61032419811910588 + i, 'test_agg_'||i, 'F', 'vector_agg_'||i, '597b5b23f4aadf9513306bcd59afb6e4c9_'||i, 'beijing_agg'||i, '14:21:56', '15:12:22+08', 'Mon Feb 10 17:32:01.4 1997 PST', '1971-03-23 11:14:05', '1997-02-02 03:04', 56 + i, '2008-02-14');
		else
			INSERT INTO vector_agg_engine.row_agg_table_03 VALUES(7, i + 4000, 111131, i + 4002.047, 39.2456977995 + i * 0.3, 3658742 + i, 8885.169 - i * 0.125,  61032419811910588 + i, 'test_agg_'||i, 'F', 'vector_agg_'||i, '597b5b23f4aadf9513306bcd59afb6e4c9_'||i, 'beijing_agg'||i, '19:07:24', '22:32:36+08', 'Mon Feb 10 17:32:01.4 1997 PST', '1971-03-23 11:14:05', '1997-02-02 03:04', 56 + i, '2015-02-14');
		end if;
	END LOOP;
END;
/
CALL func_insert_tbl_agg_03();
-- Column-store (column orientation, not orc) twin of ROW_AGG_TABLE_03 with the
-- same 10-way composite range partitioning; filled from the row table below.
create table vector_agg_engine.VECTOR_AGG_TABLE_03
(
   col_smallint smallint null
  ,col_integer	integer default 23423
  ,col_bigint	bigint default 923423432
  ,col_real	real
  ,col_numeric	numeric(18,12) null
  ,col_numeric2 numeric null
  ,col_double_precision	double precision
  ,col_decimal	decimal(19) default 923423423
  ,col_char	char(57) null
  ,col_char2	char default '0'
  ,col_varchar	varchar(19)
  ,col_text	text null
  ,col_varchar2	varchar2(25)
  ,col_time_without_time_zone	time without time zone null
  ,col_time_with_time_zone	time with time zone
  ,col_timestamp_without_timezone	timestamp
  ,col_timestamp_with_timezone	timestamptz
  ,col_smalldatetime	smalldatetime
  ,col_money	money
  ,col_date	date
) with (orientation = column, max_batchrow = 10000)
distribute by hash(col_integer)
partition by range(col_numeric, col_date, col_integer)(
partition partition_p1 values less than(3,'2002-02-04 00:01:00',20),
partition partition_p2 values less than(21,'2005-03-26 00:00:00',1061) ,
partition partition_p3 values less than(121,'2005-06-01 20:00:00',1600),
partition partition_p4 values less than(121,'2006-08-01 20:00:00',1987) ,
partition partition_p5 values less than(1456,'2007-12-03 10:00:00',2567),
partition partition_p6 values less than(2678,'2008-02-03 11:01:34',2800),
partition partition_p7 values less than(3601,'2008-02-13 01:01:34',3801),
partition partition_p8 values less than(3601,'2012-04-18 23:01:44',4560),
partition partition_p9 values less than(4500,'2012-06-18 23:01:44',4900),
partition partition_p10 values less than(9845,'2016-06-28 23:21:44',6200)) ;
insert into VECTOR_AGG_TABLE_03 select * from ROW_AGG_TABLE_03;
-- Pair of small orc tables with NULL ids (\N) for the count(table.*) and
-- count(distinct ...) whole-row tests; COPY data blocks must stay verbatim.
create table vector_agg_engine.VECTOR_AGG_TABLE_04
(
  col_id	integer
 ,col_place	varchar
)with (orientation = orc)
tablespace hdfs_ts;
COPY VECTOR_AGG_TABLE_04(col_id, col_place) FROM stdin;
12	xian
\N	xian
\N	tianshui
\N	tianshui
6	beijing
6	beijing
4	beijing
8	beijing
\.
create table vector_agg_engine.VECTOR_AGG_TABLE_05
(
  col_id	integer
 ,col_place	varchar
)with (orientation = orc)
tablespace hdfs_ts;
COPY VECTOR_AGG_TABLE_05(col_id, col_place) FROM stdin;
12	tian
\N	tian
\N	xian
\N	tshui
6	beijing
6	beijing
4	beijing
8	beijing
\.
-- Deliberately left EMPTY: used by case 4 to check aggregates over zero rows
-- (count -> 0, sum/avg/min/max -> NULL).
create table vector_agg_engine.VECTOR_AGG_TABLE_06
(
  col_int	int
 ,col_int2	int8
 ,col_char	char(20)
 ,col_varchar	varchar(30)
 ,col_date	date
 ,col_num	numeric(10,2)
 ,col_num2	numeric(10,4)
 ,col_float	float4
 ,col_float2	float8
)WITH (orientation = orc)
tablespace hdfs_ts
distribute by hash (col_int);
-- Refresh optimizer statistics so the plans asserted by the explain tests
-- below are stable.
analyze vector_agg_table_01;
analyze vector_agg_table_02;
analyze vector_agg_table_03;
analyze vector_agg_table_04;
analyze vector_agg_table_05;
analyze vector_agg_table_06;
----
--- case 1: Basic Test Without NULL (HashAgg || SortAgg)
----
-- Plain group-by, joins, grouped expressions, and min/max/sum/avg over every
-- numeric/temporal/money type; all ordered so output is deterministic.
explain (verbose on, costs off) select count(*), sum(col_integer), avg(col_integer), sum(col_integer)::float8/count(*), col_bigint from vector_agg_table_01 group by col_bigint order by col_bigint;
select count(*), sum(col_integer), avg(col_integer), sum(col_integer)::float8/count(*), col_bigint from vector_agg_table_01 group by col_bigint order by col_bigint;
select count(*), sum(A.col_integer * B.col_int), avg(A.col_integer * B.col_int), sum(A.col_integer * B.col_int)::float8/count(*), A.col_bigint, B.col_bint from vector_agg_table_01 A full outer join vector_agg_table_02 B on A.col_bigint = B.col_bint group by A.col_bigint, B.col_bint order by A.col_bigint, B.col_bint;
select sum(y) from (select sum(col_integer) y, col_bigint%2 x from vector_agg_table_01 group by col_bigint) q1 group by x order by x;
select col_bigint - 1, col_bigint + 1, sum(col_integer), col_bigint - 3, avg(col_integer), col_bigint%2, min(col_integer) from vector_agg_table_01 group by col_bigint order by col_bigint;
select col_integer from vector_agg_table_01 group by col_integer order by col_integer limit 10;
select col_integer + col_numeric from vector_agg_table_01 group by col_integer + col_numeric order by col_integer + col_numeric limit 10 offset 100;
select A.col_integer + B.col_bint, A.col_integer, B.col_bint from vector_agg_table_01 A, vector_agg_table_02 B where A.col_bigint = B.col_bint group by A.col_integer, B.col_bint order by 1, 2 limit 30;
select A.col_integer + B.col_bint from vector_agg_table_01 A, vector_agg_table_02 B where A.col_bigint = B.col_bint group by A.col_integer + B.col_bint order by 1 limit 30;
select count(*) + sum(col_integer) + avg(col_integer), col_bigint from vector_agg_table_01 group by col_bigint order by col_bigint;
select col_numeric, avg(col_numeric) from vector_agg_table_01 group by col_numeric order by col_numeric limit 50;
select col_double_precision, avg(col_double_precision) from vector_agg_table_01 group by col_double_precision order by col_double_precision limit 50;
select col_decimal, avg(col_decimal) from vector_agg_table_01 group by col_decimal order by col_decimal limit 50;
select sum(col_money), min(col_money),max(col_money) from vector_agg_table_01 limit 50;
select sum(col_money), min(col_money),max(col_money) from vector_agg_table_01 group by col_time_without_time_zone order by 1;
select A.col_money + B.col_money from vector_agg_table_01 A, vector_agg_table_03 B order by 1 limit 10;
select min(col_oid), max(col_oid) from vector_agg_table_01;
select min(col_smalldatetime), max(col_smalldatetime) from vector_agg_table_01;
select min(col_smalldatetime), max(col_smalldatetime) from vector_agg_table_01 group by col_smalldatetime order by 1, 2;
----
--- case 2: Basic Test with NULL
----
-- Reload the table with a handful of hand-crafted rows containing NULLs in
-- every column (including an all-NULL row) to exercise NULL-skipping in the
-- vectorized aggregates.
delete from vector_agg_table_01;
INSERT INTO vector_agg_engine.row_agg_table_01 VALUES(1, 12, NULL, 23, 12.021, 515.343815379585, NULL, 8875.15,  61032419811910575, 'test_agg_0', 'T', 'vector_agg_0', '597b5b23f4abcf9513306bcd59afb6e4c9_0', 'beijing_agg_0', '17:25:28', NULL, NULL, '1971-03-23 11:14:05', '1997-02-02 03:04', 79.3, '2005-02-25');
INSERT INTO vector_agg_engine.row_agg_table_01 VALUES(3, NULL, 114531, NULL, 2.047, NULL, 20857434796839002905636223150710041116810786801730952028511513795.100678976382813790191855282491921068, 8885.169, NULL, 'test_agg_7002', 'T', 'vector_agg_7002', '597b5b23f4aadf9473306bcd59afb6e4c9', 'beijing_agg_7002', '16:19:25', NULL, 'Mon Jan 15 17:32:01.4 1997 PST', NULL, '1997-02-02 03:04', 21.6, '2005-06-12');
INSERT INTO vector_agg_engine.row_agg_table_01 VALUES(1, 15, 152161, 2355, NULL, 227.25684426652, 18857434798277339938397404472048722532796412222119506033298219325.596941867590737379779439339277062225, NULL,  61032419811910588, 'test_agg_0', 'F', 'vector_agg_0', '597b5b23f4aadf9513306bcd59afb6e4c9_0', 'beijing_agg_0', NULL, '14:32:42+08', 'Mon Feb 10 17:32:01.4 1997 PST', '1971-03-23 11:14:05', NULL, 56, NULL);
INSERT INTO vector_agg_engine.row_agg_table_01 VALUES(NULL, 12, 121111, 25, 9.08, 27.25684426652, 20857434798277339938397404472048722532796412222119506033298219314.596941867590737379779439339277062225, 8885.169,  61032419811910588, NULL, 'T', NULL, NULL, 'beijing_agg_8010', '17:09:24', NULL, 'Mon Feb 10 17:32:01.4 1997 PST', NULL, '1997-02-02 03:04', 56, '2005-02-14');
INSERT INTO vector_agg_engine.row_agg_table_01 VALUES(NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
insert into vector_agg_table_01 select * from row_agg_table_01;
select avg(col_integer), sum(col_integer), count(*), col_bigint from vector_agg_table_01 group by col_bigint order by col_bigint;
select col_integer, sum(col_integer),sum(col_bigint), col_integer from vector_agg_table_01 group by 1,4 order by 1,2,3,4 limit 5;
select col_char2 from vector_agg_table_01 group by col_char2 order by col_char2;
-- count(1) vs count(col): the latter must skip NULL groups' NULL values.
select col_char2, count(1) from vector_agg_table_01 group by col_char2 order by col_char2;
select col_char2, count(col_char2) from vector_agg_table_01 group by col_char2 order by col_char2;
select count(*) from vector_agg_table_01 where col_char2 is null group by col_char2;
select max(col_smallint), min(col_smallint) from vector_agg_table_01 where col_smallint is not null;
select min(col_numeric), max(col_numeric), sum(col_numeric), avg(col_numeric) from vector_agg_table_01 group by col_smallint order by col_smallint;
select min(col_decimal), max(col_decimal), sum(col_decimal), avg(col_decimal) from vector_agg_table_01 group by col_date order by col_date;
select max(col_time_without_time_zone), min(col_time_without_time_zone) from vector_agg_table_01 where col_time_without_time_zone is not null;
select max(col_time_with_time_zone), min(col_time_with_time_zone) from vector_agg_table_01 where col_time_with_time_zone is not null;
select max(col_timestamp_without_timezone), min(col_timestamp_without_timezone) from vector_agg_table_01 where col_timestamp_without_timezone is not null;
select max(col_timestamp_with_timezone), min(col_timestamp_with_timezone) from vector_agg_table_01 where col_timestamp_with_timezone is not null;
select sum(col_time_without_time_zone), avg(col_time_without_time_zone) from vector_agg_table_01;
select sum(col_time_without_time_zone), avg(col_time_without_time_zone) from vector_agg_table_01 group by col_bigint order by 1, 2;
select sum(col_time_without_time_zone), avg(col_time_without_time_zone) from vector_agg_table_01 group by col_time_without_time_zone order by 1, 2;
----
--- case 3: Basic Test with Partition
----
-- Aggregates over the range-partitioned orc table with mixed AND/OR predicates
-- that span several partitions (exercises partition pruning + vector agg).
select count(*), sum(length(col_varchar2)), avg(col_numeric) from vector_agg_table_03 where 'DAsdf;redis' = 'DAsdf;redis' and col_smallint != 4 or col_integer%148=1 and col_varchar != 'vector_agg_500' or col_numeric < 1239.5 and col_smallint in (1,5,7) order by 1,2 limit 5;
select count(*), null as "hnull", sum(col_integer) as c_int_sum, avg(col_numeric) as c_num_avg, sum(col_real) as c_real_sum, avg(col_double_precision) as c_dp_avg, sum(col_decimal) as c_dec_num from vector_agg_table_03 where col_integer <= 201 and col_smallint >= 1 or col_bigint < 10200 and col_smallint != 7 and col_smallint != 4;
----
--- cas4: Test Agg With NULL Table
----
-- table_06 was never populated: every plain aggregate must return NULL (or 0
-- for count) and every grouped aggregate must return zero rows. The whole
-- batch is run twice to check plan/result stability on the empty table.
select count(col_char),count(col_date),count(col_num2) from vector_agg_table_06;
select count(col_char),count(col_date),count(col_num2) from vector_agg_table_06 group by col_num;
select min(col_int),max(col_varchar),min(col_num2) from vector_agg_table_06;
select min(col_int),max(col_varchar),min(col_num2) from vector_agg_table_06 group by col_date;
select sum(col_int2),sum(col_float),sum(col_num2) from vector_agg_table_06;
select sum(col_int2),sum(col_float),sum(col_num2) from vector_agg_table_06 group by col_int;
select avg(col_int2),avg(col_num),avg(col_float) from vector_agg_table_06;
select avg(col_int2),avg(col_num),avg(col_float) from vector_agg_table_06 group by col_num2;
select count(col_num2),min(col_char),max(col_varchar),sum(col_float),avg(col_num2) from vector_agg_table_06;
select count(col_num2),min(col_char),max(col_varchar),sum(col_float),avg(col_num2) from vector_agg_table_06 group by col_num2;
select count(col_char),count(col_date),count(col_num2) from vector_agg_table_06;
select count(col_char),count(col_date),count(col_num2) from vector_agg_table_06 group by col_num;
select min(col_int),max(col_varchar),min(col_num2) from vector_agg_table_06;
select min(col_int),max(col_varchar),min(col_num2) from vector_agg_table_06 group by col_date;
select sum(col_int2),sum(col_float),sum(col_num2) from vector_agg_table_06;
select sum(col_int2),sum(col_float),sum(col_num2) from vector_agg_table_06 group by col_int;
select avg(col_int2),avg(col_num),avg(col_float) from vector_agg_table_06;
select avg(col_int2),avg(col_num),avg(col_float) from vector_agg_table_06 group by col_num2;
select count(col_num2),min(col_char),max(col_varchar),sum(col_float),avg(col_num2) from vector_agg_table_06;
select count(col_num2),min(col_char),max(col_varchar),sum(col_float),avg(col_num2) from vector_agg_table_06 group by col_num2;
---
---
-- count over whole-row references with every qualification depth:
-- table.*, schema.table.*, database.schema.table.*, and an aliased join.
explain select count(distinct vector_agg_table_04.*) from vector_agg_table_04;
select count(distinct vector_agg_table_04.*) from vector_agg_table_04; --A.*
select count(vector_agg_engine.vector_agg_table_04.*) from vector_agg_engine.vector_agg_table_04; --A.B.*
select count(regression.vector_agg_engine.vector_agg_table_04.*) from regression.vector_agg_engine.vector_agg_table_04; --A.B.C.*
select count(distinct AA.*) from (select a.*, b.* from vector_agg_table_04 a, vector_agg_table_05 b where a.col_id = b.col_id) AA;
----
--- depend on lineitem_vec
----
-- TPC-H Q1-style pricing-summary queries against the shared
-- vector_engine.lineitem_vec fixture; each is run as explain + execution,
-- once ordered by sum_qty and once by the group keys.
select sum(L_QUANTITY) a ,l_returnflag  from vector_engine.lineitem_vec group by L_returnflag order by a;
select avg(L_QUANTITY) a, sum(l_quantity) b , l_returnflag  from vector_engine.lineitem_vec group by L_returnflag order by a;
explain (verbose on, costs off)
select
	l_returnflag,
	l_linestatus,
	sum(l_quantity) as sum_qty,
	sum(l_extendedprice) as sum_base_price,
	sum(l_extendedprice * (1 - l_discount)) as sum_disc_price,
	sum(l_extendedprice * (1 - l_discount) * (1 + l_tax)) as sum_charge,
	avg(l_quantity) as avg_qty,
	avg(l_extendedprice) as avg_price,
	avg(l_discount) as avg_disc,
	count(*) as count_order
from
	vector_engine.lineitem_vec
where
	l_shipdate <= date '1998-12-01' - interval '3 day'
group by
	l_returnflag,
	l_linestatus
order by
	sum_qty
;
select
	l_returnflag,
	l_linestatus,
	sum(l_quantity) as sum_qty,
	sum(l_extendedprice) as sum_base_price,
	sum(l_extendedprice * (1 - l_discount)) as sum_disc_price,
	sum(l_extendedprice * (1 - l_discount) * (1 + l_tax)) as sum_charge,
	avg(l_quantity) as avg_qty,
	avg(l_extendedprice) as avg_price,
	avg(l_discount) as avg_disc,
	count(*) as count_order
from
	vector_engine.lineitem_vec
where
	l_shipdate <= date '1998-12-01' - interval '3 day'
group by
	l_returnflag,
	l_linestatus
order by
	sum_qty
;
explain (verbose on, costs off)
select
	l_returnflag,
	l_linestatus,
	sum(l_quantity) as sum_qty,
	sum(l_extendedprice) as sum_base_price,
	sum(l_extendedprice * (1 - l_discount)) as sum_disc_price,
	sum(l_extendedprice * (1 - l_discount) * (1 + l_tax)) as sum_charge,
	avg(l_quantity) as avg_qty,
	avg(l_extendedprice) as avg_price,
	avg(l_discount) as avg_disc,
	count(*) as count_order
from
	vector_engine.lineitem_vec
where
	l_shipdate <= date '1998-12-01' - interval '3 day'
group by
	l_returnflag,
	l_linestatus
order by
	l_returnflag,
	l_linestatus
;
select
	l_returnflag,
	l_linestatus,
	sum(l_quantity) as sum_qty,
	sum(l_extendedprice) as sum_base_price,
	sum(l_extendedprice * (1 - l_discount)) as sum_disc_price,
	sum(l_extendedprice * (1 - l_discount) * (1 + l_tax)) as sum_charge,
	avg(l_quantity) as avg_qty,
	avg(l_extendedprice) as avg_price,
	avg(l_discount) as avg_disc,
	count(*) as count_order
from
	vector_engine.lineitem_vec
where
	l_shipdate <= date '1998-12-01' - interval '3 day'
group by
	l_returnflag,
	l_linestatus
order by
	l_returnflag,
	l_linestatus
;
---
---
-- GROUP BY with constant select-list entries and duplicated group keys:
-- the planner must de-duplicate/const-fold keys without changing output.
explain (verbose on, costs off)
select col_smallint ,'',col_numeric2,0,'',0,1,0,0,'' ,col_varchar2 from vector_agg_table_01 where col_integer=2 group by 1,2,3,4,5,8,9,10,11;
select col_smallint ,'',col_numeric2,0,'',0,1,0,0,'' ,col_varchar2 from vector_agg_table_01 where col_integer=2 group by 1,2,3,4,5,8,9,10,11;
select col_smallint,'','' from vector_agg_table_01 where col_integer=2 group by 1,2,3;
explain (verbose on, costs off)
select col_smallint,col_smallint from vector_agg_table_01 where col_integer=2 group by 1,2;
select col_smallint,col_smallint from vector_agg_table_01 where col_integer=2 group by 1,2;
explain (verbose on, costs off)
select col_smallint,'',col_smallint from vector_agg_table_01 where col_integer=2 group by 1,3;
select col_smallint,'',col_smallint from vector_agg_table_01 where col_integer=2 group by 1,3;
----
--- Clean Resource and Tables
----
drop schema vector_agg_engine cascade;
-- With hashagg disabled, the remaining tests force the sort-agg path for
-- count/avg(distinct) over column-store data; row sizes straddle the
-- batch boundary (~1000/1500/2000/2200 rows) to hit batch-edge code.
set enable_hashagg = off;
create schema vec;
set current_schema to vec;
--text sort agg and plain agg
create table aggt_col(a numeric, b int, c varchar(10))with(orientation = orc) tablespace hdfs_ts;
insert into aggt_col values(1, 1, 'abc'), (1, 1, 'abc'), (1, 1, 'abc'),(1, 2, 'efg'), (1, 3, 'hij');
select * from aggt_col order by 1, 2, 3;
select count(distinct a) from aggt_col;
select count(distinct b) from aggt_col;
select count(distinct c) from aggt_col;
select avg(distinct a) from aggt_col;
select avg(distinct b) from aggt_col;
select a, sum(a), avg(distinct a),count(distinct b), sum(b) from aggt_col group by a;
select a, count(distinct b), max(c) from aggt_col group by a;
select a, count(distinct b), sum(b) from aggt_col group by a;
delete from aggt_col;
-- Bulk rows are staged in a row table first because generate_series inserts
-- go row-by-row; then copied into the orc table in one batch.
create table aggt_row(a numeric, b int, c varchar(10));
insert into aggt_row values(1, generate_series(1, 2200), 'agg'||generate_series(1, 2200));
insert into aggt_col select * from aggt_row;
drop table aggt_row;
select a, count(distinct b), count(distinct c) from aggt_col group by a;
insert into aggt_col values(1, 2200, 'agg2200');
select a, count(distinct b), count(distinct c) from aggt_col group by a;
insert into aggt_col values(2, 2200, 'agg2200');
select a, count(distinct b), count(distinct c) from aggt_col group by a order by 1;
delete from aggt_col;
create table aggt_row(a numeric, b int, c varchar(10));
insert into aggt_row values(generate_series(1, 2200), generate_series(1, 2200), 'agg'||generate_series(1, 2200));
insert into aggt_col select * from aggt_row;
drop table aggt_row;
select a, count(distinct b), count(distinct c) from aggt_col group by a order by 1;
insert into aggt_col values(1, 2201, 'agg2201'), (1, 2202, 'agg2202'), (1, 2203, 'agg2203');
select a, count(distinct b), count(distinct c) from aggt_col group by a order by 1;
insert into aggt_col values(2, 2, 'agg2'), (2, 2, 'agg2'), (2, 2, 'agg2');
select a, count(distinct b), count(distinct c) from aggt_col group by a order by 1;
drop table aggt_col;
create table aggt_col(a numeric, b int, c varchar(10), d int)with(orientation = orc) tablespace hdfs_ts;
create table aggt_row(a numeric, b int, c varchar(10), d int);
insert into aggt_row values(1, 1, 'agg1', generate_series(1, 1000));
insert into aggt_row values(1, 2, 'agg2', generate_series(1, 500));
insert into aggt_col select * from aggt_row;
drop table aggt_row;
select count(distinct a) from aggt_col;
select count(distinct b) from aggt_col;
select count(distinct c) from aggt_col;
select a, count(distinct b), count(distinct c), sum(a), sum(b), max(c) from aggt_col group by a order by 1;
select b, count(distinct a), count(distinct c), sum(a), sum(b), max(c) from aggt_col group by b order by 1;
select c, count(distinct a), count(distinct b), sum(a), sum(b) from aggt_col group by c order by 1;
drop table aggt_col;
create table aggt_col(a numeric, b int, c varchar(10), d int)with(orientation = orc) tablespace hdfs_ts;
create table aggt_row(a numeric, b int, c varchar(10), d int);
insert into aggt_row values(1, 1, 'agg1', generate_series(1, 1500));
insert into aggt_col select * from aggt_row;
drop table aggt_row;
select count(distinct a) from aggt_col;
select count(distinct b) from aggt_col;
select count(distinct c) from aggt_col;
select a, count(distinct b), count(distinct c) from aggt_col group by a order by 1;
select b, count(distinct a), count(distinct c) from aggt_col group by b order by 1;
select c, count(distinct a), count(distinct b) from aggt_col group by c order by 1;
select sum(a) as sum_value, d from aggt_col group by d having sum_value < 0;
drop table aggt_col;
create table aggt_col(a numeric, b int, c varchar(10), d int)with(orientation = orc) tablespace hdfs_ts;
create table aggt_row(a numeric, b int, c varchar(10), d int);
insert into aggt_row values(1, 1, 'agg1', generate_series(1, 2000));
insert into aggt_col select * from aggt_row;
drop table aggt_row;
select count(distinct a) from aggt_col;
select count(distinct b) from aggt_col;
select count(distinct c) from aggt_col;
select a, count(distinct b), count(distinct c) from aggt_col group by a order by 1;
select b, count(distinct a), count(distinct c) from aggt_col group by b order by 1;
select c, count(distinct a), count(distinct b) from aggt_col group by c order by 1;
insert into aggt_col values(1, 2, 'agg2');
insert into aggt_col values(1, 3, 'agg3');
select count(distinct a) from aggt_col;
select count(distinct b) from aggt_col;
select count(distinct c) from aggt_col;
select a, count(distinct b), count(distinct c) from aggt_col group by a order by 1;
select b, count(distinct a), count(distinct c) from aggt_col group by b order by 1;
select c, count(distinct a), count(distinct b) from aggt_col group by c order by 1;
insert into aggt_col values(10, 20, 'agg30');
select count(distinct a) from aggt_col;
select count(distinct b) from aggt_col;
select count(distinct c) from aggt_col;
select a, count(distinct b), count(distinct c) from aggt_col group by a order by 1;
select b, count(distinct a), count(distinct c) from aggt_col group by b order by 1;
select c, count(distinct a), count(distinct b) from aggt_col group by c order by 1;
delete from aggt_col;
create table aggt_row(a numeric, b int, c varchar(10), d int);
insert into aggt_row values(1, generate_series(1, 500), 'agg'||generate_series(1, 500));
insert into aggt_row values(2, generate_series(1, 1500), 'agg'||generate_series(1, 1500));
insert into aggt_col select * from aggt_row;
drop table aggt_row;
analyze aggt_col;
select count(distinct a) from aggt_col;
select count(distinct b) from aggt_col;
select count(distinct c) from aggt_col;
select a, count(distinct b), count(distinct c), max(b), max(c) from aggt_col group by a order by 1;
select b, count(distinct a), count(distinct c), max(b), max(c) from aggt_col group by b order by 1;
select c, count(distinct a), count(distinct b), max(b), max(c) from aggt_col group by c order by 1;
delete from aggt_col;
create table aggt_row(a numeric, b int, c varchar(10), d int);
insert into aggt_row values(2, generate_series(1, 1010), 'agg'||generate_series(1, 1010));
insert into aggt_row values(3, generate_series(1, 800), 'agg'||generate_series(1, 800));
insert into aggt_col select * from aggt_row;
drop table aggt_row;
--insert into aggt_col values(4, generate_series(1, 800), 'agg'||generate_series(1, 800));
--insert into aggt_col values(5, generate_series(1, 800), 'agg'||generate_series(1, 800));
--insert into aggt_col values(6, generate_series(1, 800), 'agg'||generate_series(1, 800));
select count(distinct a) from aggt_col;
select count(distinct b) from aggt_col;
select count(distinct c) from aggt_col;
select a, count(distinct b), count(distinct c) from aggt_col group by a order by 1;
select b, count(distinct a), count(distinct c) from aggt_col group by b order by 1;
select c, count(distinct a), count(distinct b) from aggt_col group by c order by 1;
create table aggt_col_numeric(a int, b numeric(10,0), c varchar(10), d int)with(orientation = orc) tablespace hdfs_ts;
create table aggt_row_numeric(a int, b numeric(10,0), c varchar(10), d int);
insert into aggt_row_numeric values(1, 100, 'aggtest', generate_series(1, 2200));
insert into aggt_col_numeric select * from aggt_row_numeric;
drop table aggt_row_numeric;
select count(distinct b) from aggt_col_numeric;
select count(distinct c) from aggt_col_numeric;
select count(distinct b), count(distinct c) from aggt_col_numeric group by a;
select sum(b), avg(b), count(distinct b), count(distinct c) from aggt_col_numeric group by a;
explain (costs off) select sum(b), avg(b), count(distinct b), count(distinct c) from aggt_col_numeric group by a;
drop table aggt_col;
drop table aggt_col_numeric;
create table t1_agg_col(a int, b timetz, c tinterval, d interval, e int)with(orientation = orc) tablespace hdfs_ts;
create table t1_agg_row(a int, b timetz, c tinterval, d interval, e int);
insert into t1_agg_row values(1, '10:11:12', '["Feb 10, 1947 23:59:12" "Jan 14, 1973 03:14:21"]', 1, generate_series(1, 1000));
insert into t1_agg_col select * from t1_agg_row;
drop table t1_agg_row;
select a, count(distinct b), count(distinct c), count(distinct d) from t1_agg_col group by a;
insert into t1_agg_col values(1, '10:11:12', '["Feb 10, 1947 23:59:12" "Jan 14, 1973 03:14:21"]', 1);
select a, count(distinct b), count(distinct c), count(distinct d) from t1_agg_col group by a;
insert into t1_agg_col values(1, '10:11:13', '["Feb 10, 1957 23:59:12" "Jan 14, 1973 03:14:21"]', 2);
select a, count(distinct b), count(distinct c), count(distinct d) from t1_agg_col group by a;
insert into t1_agg_col values(2, '10:11:13', '["Feb 10, 1957 23:59:12" "Jan 14, 1973 03:14:21"]', 2);
select a, count(distinct b), count(distinct c), count(distinct d) from t1_agg_col group by a order by 1;
-- DISTINCT aggregate mixed with constant projection columns.
-- Fixed: stray duplicate statement terminator (";;") removed; the extra
-- semicolon produced an empty statement, which some parsers reject.
select count(distinct b), 1 as col1, 'plain agg' as col2 from t1_agg_col order by 1;
-- Tear down the time-type aggregate fixture and its schema.
-- NOTE(review): schema "vec" is created earlier in the full file (outside
-- this excerpt); dropping it here ends the vectorized-aggregate section.
drop table t1_agg_col;
drop schema vec;
reset enable_hashagg;
-- SQL Test Suite, V6.0, Schema Definition, schema8.smi
-- 59-byte ID
-- TEd Version #
-- date_time print
-- ***************************************************************
-- ****** THIS FILE SHOULD BE RUN UNDER AUTHORIZATION ID SUN *****
-- ***************************************************************
-- This file defines the base tables used in most of the CDR tests.
-- This non-standard schema definition is provided so that
-- implementations which require semicolons to terminate statements,
-- but which are otherwise conforming, can still execute the
-- remaining tests.
-- NOTE(review): lines prefixed "--O" are optional/alternative statements
-- from the original NIST suite, deliberately disabled for this target
-- engine. The DDL text below is conformance-test material and is left
-- byte-identical; only comments have been added.
CREATE SCHEMA
--O AUTHORIZATION SUN;
SUN;
set schema SUN;
CREATE TABLE SUN.ECCO (C1 CHAR(2));
--O CREATE TABLE ECCO (C1 CHAR(2));
-- Core STAFF/PROJ/WORKS tables used by most CDR tests (no constraints).
CREATE TABLE STAFF
(EMPNUM CHAR(3) NOT NULL,
EMPNAME CHAR(20),
GRADE DECIMAL(4),
CITY CHAR(15));
CREATE TABLE PROJ
(PNUM CHAR(3) NOT NULL,
PNAME CHAR(20),
PTYPE CHAR(6),
BUDGET DECIMAL(9),
CITY CHAR(15));
CREATE TABLE WORKS
(EMPNUM CHAR(3) NOT NULL,
PNUM CHAR(3) NOT NULL,
HOURS DECIMAL(5));
-- STAFF3/PROJ3/WORKS3: same shape plus UNIQUE keys and FKs, used by
-- referential-integrity tests.
CREATE TABLE STAFF3
(EMPNUM CHAR(3) NOT NULL,
EMPNAME CHAR(20),
GRADE DECIMAL(4),
CITY CHAR(15),
UNIQUE (EMPNUM));
CREATE TABLE PROJ3
(PNUM CHAR(3) NOT NULL,
PNAME CHAR(20),
PTYPE CHAR(6),
BUDGET DECIMAL(9),
CITY CHAR(15),
CONSTRAINT PROJ3_UNIQUE UNIQUE (PNUM));
CREATE TABLE WORKS3
(EMPNUM CHAR(3) NOT NULL,
PNUM CHAR(3) NOT NULL,
HOURS DECIMAL(5),
FOREIGN KEY (EMPNUM) REFERENCES STAFF3(EMPNUM),
FOREIGN KEY (PNUM) REFERENCES PROJ3(PNUM));
-- STAFF4..STAFF16: variations exercising DEFAULT, PRIMARY KEY, and CHECK
-- constraint syntax forms (column-level, table-level, named, unnamed).
CREATE TABLE STAFF4 (EMPNUM CHAR(3) NOT NULL,
EMPNAME CHAR(20) DEFAULT NULL,
--O GRADE DECIMAL(4) DEFAULT 0,
--O CITY CHAR(15) DEFAULT ' ');
GRADE DECIMAL(4) ,
CITY CHAR(15) );
CREATE TABLE STAFF14 (EMPNUM CHAR(3) NOT NULL,
--O EMPNAME CHAR(20) DEFAULT USER,
EMPNAME CHAR(20) ,
-- EMPNAME CHAR precision may be changed to implementation-defined
-- precision for value of USER
GRADE DECIMAL(4),
CITY CHAR(15));
CREATE TABLE STAFF5 (EMPNUM CHAR(3) NOT NULL,
EMPNAME CHAR(20),
GRADE DECIMAL(4),
CITY CHAR(15),
PRIMARY KEY (EMPNUM),
CONSTRAINT STAFF5_GRADE CHECK (GRADE > 0 AND GRADE < 20));
CREATE TABLE STAFF6 (EMPNUM CHAR(3) NOT NULL,
EMPNAME CHAR(20),
GRADE DECIMAL(4)
CONSTRAINT STAFF6_GRADE CHECK (GRADE > 0 AND GRADE < 20),
CITY CHAR(15));
CREATE TABLE STAFF7 (EMPNUM CHAR(3) NOT NULL,
EMPNAME CHAR(20),
GRADE DECIMAL(4),
CITY CHAR(15),
CONSTRAINT STAFF7_PK PRIMARY KEY (EMPNUM),
CONSTRAINT STAFF7_GRADE CHECK (GRADE BETWEEN 1 AND 20));
CREATE TABLE STAFF8 (EMPNUM CHAR(3) NOT NULL,
EMPNAME CHAR(20),
GRADE DECIMAL(4),
CITY CHAR(15),
PRIMARY KEY (EMPNUM),
CONSTRAINT STAFF8_EMPNAME CHECK (EMPNAME IS NOT NULL));
CREATE TABLE STAFF9 (EMPNUM CHAR(3) NOT NULL
CONSTRAINT STAFF9_PK PRIMARY KEY,
EMPNAME CHAR(20),
GRADE DECIMAL(4),
CITY CHAR(15),
CONSTRAINT STAFF9_EMPNAME CHECK (EMPNAME NOT LIKE 'T%'));
CREATE TABLE STAFF10 (EMPNUM CHAR(3) NOT NULL,
EMPNAME CHAR(20),
GRADE DECIMAL(4),
CITY CHAR(15),
PRIMARY KEY (EMPNUM),
CONSTRAINT STAFF10_GRADE CHECK (GRADE NOT IN (5,22)));
CREATE TABLE STAFF11 (EMPNUM CHAR(3) NOT NULL PRIMARY KEY,
EMPNAME CHAR(20),
GRADE DECIMAL(4),
CITY CHAR(15),
CONSTRAINT STAFF11_GRADE_EMPNAME
CHECK (GRADE NOT IN (5,22)
AND EMPNAME NOT LIKE 'T%'));
CREATE TABLE STAFF12 (EMPNUM CHAR(3) NOT NULL,
EMPNAME CHAR(20),
GRADE DECIMAL(4),
CITY CHAR(15),
PRIMARY KEY (EMPNUM),
CONSTRAINT STAFF12_GRADE_EMPNAME
CHECK (NOT GRADE IN (5,22)
AND NOT EMPNAME LIKE 'T%'));
CREATE TABLE STAFF13 (EMPNUM CHAR(3) NOT NULL,
EMPNAME CHAR(20),
GRADE DECIMAL(4),
CITY CHAR(15),
PRIMARY KEY (EMPNUM),
CONSTRAINT STAFF13_EMPNAME CHECK (NOT EMPNAME IS NULL));
CREATE TABLE STAFF15 (EMPNUM CHAR(3),
EMPNAME CHAR(20) NOT NULL,
GRADE DECIMAL(4),
CITY CHAR(15));
CREATE TABLE STAFF16 (EMPNUM CHAR(3) NOT NULL,
EMPNAME CHAR(20) DEFAULT NULL,
GRADE DECIMAL(4) NOT NULL CHECK (GRADE IN (100,150,200)),
CITY CHAR(15), PRIMARY KEY (GRADE,EMPNUM));
-- SIZ1: one parent with a six-column composite UNIQUE key and a child
-- with the matching six-column FK.
CREATE TABLE SIZ1_P
(S1 CHAR(3) NOT NULL,
S2 CHAR(3) NOT NULL,
S3 DECIMAL(4) NOT NULL,
S4 CHAR(3) NOT NULL,
S5 DECIMAL(4) NOT NULL,
S6 CHAR(3) NOT NULL,
R1 CHAR(3),
R2 CHAR(3),
R3 DECIMAL(4),
UNIQUE (S1,S2,S3,S4,S5,S6));
CREATE TABLE SIZ1_F
(F1 CHAR(3) NOT NULL,
F2 CHAR(3),
F3 DECIMAL(4),
F4 CHAR(3),
F5 DECIMAL(4),
F6 CHAR(3),
R1 CHAR(3),
R2 DECIMAL(5),
R3 DECIMAL(4),
FOREIGN KEY (F1,F2,F3,F4,F5,F6)
REFERENCES SIZ1_P(S1,S2,S3,S4,S5,S6));
-- SIZ2: one parent with ten single-column UNIQUE keys, each referenced
-- by its own child table (fan-out of references from one parent).
CREATE TABLE SIZ2_P
(P1 CHAR(3) NOT NULL,
P2 CHAR(3) NOT NULL,
P3 DECIMAL(4) NOT NULL,
P4 CHAR(3) NOT NULL,
P5 DECIMAL(4) NOT NULL,
P6 CHAR(3) NOT NULL,
P7 CHAR(3) NOT NULL,
P8 DECIMAL(4) NOT NULL,
P9 DECIMAL(4) NOT NULL,
P10 DECIMAL(4) NOT NULL,
P11 CHAR(4),
UNIQUE (P1),
UNIQUE (P2),
UNIQUE (P3),
UNIQUE (P4),
UNIQUE (P5),
UNIQUE (P6),
UNIQUE (P7),
UNIQUE (P8),
UNIQUE (P9),
UNIQUE (P10));
CREATE TABLE SIZ2_F1
(F1 CHAR(3) NOT NULL,
F2 CHAR(8),
FOREIGN KEY (F1)
REFERENCES SIZ2_P(P1));
CREATE TABLE SIZ2_F2
(F1 CHAR(3) NOT NULL,
F2 CHAR(8),
FOREIGN KEY (F1)
REFERENCES SIZ2_P(P2));
CREATE TABLE SIZ2_F3
(F1 DECIMAL(4) NOT NULL,
F2 CHAR(8),
FOREIGN KEY (F1)
REFERENCES SIZ2_P(P3));
CREATE TABLE SIZ2_F4
(F1 CHAR(3) NOT NULL,
F2 CHAR(8),
FOREIGN KEY (F1)
REFERENCES SIZ2_P(P4));
CREATE TABLE SIZ2_F5
(F1 DECIMAL(4) NOT NULL,
F2 CHAR(8),
FOREIGN KEY (F1)
REFERENCES SIZ2_P(P5));
CREATE TABLE SIZ2_F6
(F1 CHAR(3) NOT NULL,
F2 CHAR(8),
FOREIGN KEY (F1)
REFERENCES SIZ2_P(P6));
CREATE TABLE SIZ2_F7
(F1 CHAR(3) NOT NULL,
F2 CHAR(8),
FOREIGN KEY (F1)
REFERENCES SIZ2_P(P7));
CREATE TABLE SIZ2_F8
(F1 DECIMAL(4) NOT NULL,
F2 CHAR(8),
FOREIGN KEY (F1)
REFERENCES SIZ2_P(P8));
CREATE TABLE SIZ2_F9
(F1 DECIMAL(4) NOT NULL,
F2 CHAR(8),
FOREIGN KEY (F1)
REFERENCES SIZ2_P(P9));
CREATE TABLE SIZ2_F10
(F1 DECIMAL(4) NOT NULL,
F2 CHAR(8),
FOREIGN KEY (F1)
REFERENCES SIZ2_P(P10));
-- SIZ3: the mirror image of SIZ2 — ten parents, one child whose ten
-- columns each reference a different parent.
CREATE TABLE SIZ3_P1
(F1 CHAR(3) NOT NULL,
F2 CHAR(8),
UNIQUE (F1));
CREATE TABLE SIZ3_P2
(F1 CHAR(3) NOT NULL,
F2 CHAR(8),
UNIQUE (F1));
CREATE TABLE SIZ3_P3
(F1 DECIMAL(4) NOT NULL,
F2 CHAR(8),
UNIQUE (F1));
CREATE TABLE SIZ3_P4
(F1 CHAR(3) NOT NULL,
F2 CHAR(8),
UNIQUE (F1));
CREATE TABLE SIZ3_P5
(F1 DECIMAL(4) NOT NULL,
F2 CHAR(8),
UNIQUE (F1));
CREATE TABLE SIZ3_P6
(F1 CHAR(3) NOT NULL,
F2 CHAR(8),
UNIQUE (F1));
CREATE TABLE SIZ3_P7
(F1 CHAR(3) NOT NULL,
F2 CHAR(8),
UNIQUE (F1));
CREATE TABLE SIZ3_P8
(F1 DECIMAL(4) NOT NULL,
F2 CHAR(8),
UNIQUE (F1));
CREATE TABLE SIZ3_P9
(F1 DECIMAL(4) NOT NULL,
F2 CHAR(8),
UNIQUE (F1));
CREATE TABLE SIZ3_P10
(F1 DECIMAL(4) NOT NULL,
F2 CHAR(8),
UNIQUE (F1));
CREATE TABLE SIZ3_F
(P1 CHAR(3) NOT NULL,
P2 CHAR(3),
P3 DECIMAL(4),
P4 CHAR(3),
P5 DECIMAL(4),
P6 CHAR(3),
P7 CHAR(3),
P8 DECIMAL(4),
P9 DECIMAL(4),
P10 DECIMAL(4),
P11 CHAR(4),
FOREIGN KEY (P1)
REFERENCES SIZ3_P1(F1),
FOREIGN KEY (P2)
REFERENCES SIZ3_P2(F1),
FOREIGN KEY (P3)
REFERENCES SIZ3_P3(F1),
FOREIGN KEY (P4)
REFERENCES SIZ3_P4(F1),
FOREIGN KEY (P5)
REFERENCES SIZ3_P5(F1),
FOREIGN KEY (P6)
REFERENCES SIZ3_P6(F1),
FOREIGN KEY (P7)
REFERENCES SIZ3_P7(F1),
FOREIGN KEY (P8)
REFERENCES SIZ3_P8(F1),
FOREIGN KEY (P9)
REFERENCES SIZ3_P9(F1),
FOREIGN KEY (P10)
REFERENCES SIZ3_P10(F1));
-- DEPT/EMP/EXPERIENCE: multi-key referential chain, including a composite
-- (ENAME,BTH_DATE) key referenced from EXPERIENCE.
CREATE TABLE DEPT
(DNO DECIMAL(4) NOT NULL,
DNAME CHAR(20) NOT NULL,
DEAN CHAR(30),
PRIMARY KEY (DNO),
UNIQUE (DNAME));
CREATE TABLE EMP
(ENO DECIMAL(4) NOT NULL,
ENAME CHAR(20) NOT NULL,
EDESC CHAR(30),
DNO DECIMAL(4) NOT NULL,
DNAME CHAR(20),
BTH_DATE DECIMAL(6) NOT NULL,
PRIMARY KEY (ENO),
UNIQUE (ENAME,BTH_DATE),
FOREIGN KEY (DNO) REFERENCES
DEPT(DNO),
FOREIGN KEY (DNAME) REFERENCES
DEPT(DNAME));
CREATE TABLE EXPERIENCE
(EXP_NAME CHAR(20),
BTH_DATE DECIMAL(6),
WK_DATE DECIMAL(6),
DESCR CHAR(40),
FOREIGN KEY (EXP_NAME,BTH_DATE) REFERENCES
EMP(ENAME,BTH_DATE));
-- The following tables, STAFF_M and PROJ_M reference each other.
-- Table STAFF_M has a "forward reference" to PROJ_M.
CREATE TABLE STAFF_M
(EMPNUM CHAR(3) NOT NULL,
EMPNAME CHAR(20),
GRADE DECIMAL(4),
CITY CHAR(15),
PRI_WK CHAR(3),
UNIQUE (EMPNUM));
CREATE TABLE PROJ_M
(PNUM CHAR(3) NOT NULL,
PNAME CHAR(20),
PTYPE CHAR(6),
BUDGET DECIMAL(9),
CITY CHAR(15),
MGR CHAR(3),
UNIQUE (PNUM),
FOREIGN KEY (MGR)
REFERENCES STAFF_M(EMPNUM));
-- The cyclic FK is added after both tables exist.
ALTER TABLE STAFF_M ADD FOREIGN KEY (PRI_WK)
REFERENCES PROJ_M (PNUM);
-- The following table is self-referencing.
CREATE TABLE STAFF_C
(EMPNUM CHAR(3) NOT NULL,
EMPNAME CHAR(20),
GRADE DECIMAL(4),
CITY CHAR(15),
MGR CHAR(3),
UNIQUE (EMPNUM),
FOREIGN KEY (MGR)
REFERENCES STAFF_C(EMPNUM));
-- STAFF_P/PROJ_P: privilege-test tables (see disabled GRANTs below).
CREATE TABLE STAFF_P
(EMPNUM CHAR(3) NOT NULL,
EMPNAME CHAR(20),
GRADE DECIMAL(4),
CITY CHAR(15),
UNIQUE (EMPNUM));
CREATE TABLE PROJ_P
(PNUM CHAR(3) NOT NULL,
PNAME CHAR(20),
PTYPE CHAR(6),
BUDGET DECIMAL(9),
CITY CHAR(15),
UNIQUE (PNUM));
-- MID1 is self-referencing via a column-level REFERENCES clause.
CREATE TABLE MID1 (P_KEY DECIMAL(4) NOT NULL UNIQUE,
F_KEY DECIMAL(4) REFERENCES MID1(P_KEY));
CREATE TABLE ACR_SCH_P(P1 DECIMAL(4) NOT NULL UNIQUE,
P2 CHAR(4));
--O CREATE TABLE CHAR_DEFAULT
--O (SEX_CODE CHAR(1) DEFAULT 'F',
--O NICKNAME CHAR(20) DEFAULT 'No nickname given',
--O INSURANCE1 CHAR(5) DEFAULT 'basic');
--O CREATE TABLE EXACT_DEF
--O (BODY_TEMP NUMERIC(4,1) DEFAULT 98.6,
--O MAX_NUM NUMERIC(5) DEFAULT -55555,
--O MIN_NUM DEC(6,6) DEFAULT .000001);
--O CREATE TABLE APPROX_DEF
--O (X_COUNT REAL DEFAULT 1.78E12,
--O Y_COUNT REAL DEFAULT -9.99E10,
--O Z_COUNT REAL DEFAULT 3.45E-11,
--O ZZ_COUNT REAL DEFAULT -7.6777E-7);
--O CREATE TABLE SIZE_TAB
--O (COL1 CHAR(75) DEFAULT
--O'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghijklmnopqrstuvwxyz0123456789012',
--O COL2 INTEGER DEFAULT -999888777,
--O COL3 DEC(15,6) DEFAULT 987654321.123456,
--O COL4 REAL DEFAULT -1.048576E22);
-- Commodity-trading tables: C_TRANSACTION's FKs omit the column list,
-- exercising the "references primary key by default" form.
CREATE TABLE COMMODITY
(C_NUM INTEGER NOT NULL,
C_NAME CHAR(7) NOT NULL UNIQUE,
PRIMARY KEY (C_NUM));
CREATE TABLE CURRENCY_TABLE
(CURRENCY CHAR(10) NOT NULL,
DOLLAR_EQUIV NUMERIC(5, 2),
PRIMARY KEY (CURRENCY));
CREATE TABLE MEASURE_TABLE
(MEASURE CHAR(8) NOT NULL,
POUND_EQUIV NUMERIC(8,2),
PRIMARY KEY (MEASURE));
CREATE TABLE C_TRANSACTION
(COMMOD_NO INTEGER,
TOT_PRICE DECIMAL(12,2),
CURRENCY CHAR(10),
UNITS INTEGER,
MEASURE CHAR(8),
T_DATE INTEGER,
FOREIGN KEY (COMMOD_NO)
REFERENCES COMMODITY,
FOREIGN KEY (CURRENCY)
REFERENCES CURRENCY_TABLE,
FOREIGN KEY (MEASURE)
REFERENCES MEASURE_TABLE);
-- T6118REF/T118/T6: maximum-key-length tests (118-char key, and a
-- six-column composite key referenced in a permuted column order).
CREATE TABLE T6118REF (
COL1 CHAR(20) NOT NULL, COL2 CHAR(20) NOT NULL,
COL3 CHAR(20) NOT NULL, COL4 CHAR(20) NOT NULL,
COL5 CHAR(23) NOT NULL, COL6 NUMERIC (4) NOT NULL,
STR118 CHAR(118) NOT NULL UNIQUE,
UNIQUE (COL1, COL2, COL4, COL3, COL5, COL6));
CREATE TABLE T118(STR118 CHAR(118) NOT NULL UNIQUE,
FOREIGN KEY (STR118) REFERENCES T6118REF (STR118));
CREATE TABLE T6 (COL1 CHAR(20), COL2 CHAR(20),
COL3 CHAR(20), COL4 CHAR(20),
COL5 CHAR(23), COL6 NUMERIC (4),
FOREIGN KEY (COL1, COL2, COL4, COL3, COL5, COL6)
REFERENCES T6118REF (COL1, COL2, COL4, COL3, COL5, COL6));
-- ********************** create view statements *****************
-- Cross-schema view: reads HU.TESTREPORT (created by the HU schema file
-- of the suite, outside this excerpt).
CREATE VIEW TESTREPORT AS
SELECT TESTNO, RESULT, TESTTYPE
FROM HU.TESTREPORT;
--O FROM TESTREPORT;
--O CREATE VIEW DOLLARS_PER_POUND (COMMODITY, UNIT_PRICE, FROM_DATE, TO_DATE)
--O AS SELECT COMMODITY.C_NAME,
--O SUM(TOT_PRICE * DOLLAR_EQUIV) / SUM(UNITS * POUND_EQUIV),
--O MIN(T_DATE), MAX(T_DATE)
--O FROM C_TRANSACTION, COMMODITY, CURRENCY_TABLE, MEASURE_TABLE
--O WHERE C_TRANSACTION.COMMOD_NO = COMMODITY.C_NUM
--O AND C_TRANSACTION.CURRENCY = CURRENCY_TABLE.CURRENCY
--O AND C_TRANSACTION.MEASURE = MEASURE_TABLE.MEASURE
--O GROUP BY COMMODITY.C_NAME
--O HAVING SUM(TOT_PRICE * DOLLAR_EQUIV) > 10000;
-- View COST_PER_UNIT for OPTIONAL test 0403
-- Remove view from schema if it causes errors.
--O CREATE VIEW COST_PER_UNIT
--O (COMMODITY, UNIT_PRICE, CURRENCY, MEASURE)
--O AS SELECT COMMODITY, UNIT_PRICE * POUND_EQUIV / DOLLAR_EQUIV,
--O CURRENCY, MEASURE
--O FROM DOLLARS_PER_POUND, CURRENCY_TABLE, MEASURE_TABLE;
CREATE VIEW STAFF6_WITH_GRADES AS
SELECT EMPNUM,EMPNAME,GRADE,CITY
FROM STAFF6
WHERE GRADE > 0 AND GRADE < 20
;
--O WITH CHECK OPTION;
-- ************** grant statements follow *************
--O GRANT SELECT ON SUN.ECCO TO PUBLIC;
--O GRANT INSERT ON TESTREPORT
--O TO PUBLIC;
--O GRANT REFERENCES ON ACR_SCH_P TO SULLIVAN
--O WITH GRANT OPTION;
--O GRANT ALL PRIVILEGES ON PROJ_P
--O TO SULLIVAN;
--O GRANT ALL PRIVILEGES ON T6118REF TO FLATER;
--O GRANT ALL PRIVILEGES ON T118 TO FLATER;
--O GRANT ALL PRIVILEGES ON T6 TO FLATER;
-- Test GRANT without grant permission below.
-- "WITH GRANT OPTION" purposefully omitted from SUN's GRANT.
-- Do not insert text "WITH GRANT OPTION"
--O GRANT REFERENCES ON STAFF_P
--O TO SULLIVAN;
--O GRANT REFERENCES (C_NUM) ON COMMODITY TO SCHANZLE;
-- ************* End of Schema *************
CREATE TABLE brintest (byteacol bytea,
charcol "char",
namecol name,
int8col bigint,
int2col smallint,
int4col integer,
textcol text,
oidcol oid,
tidcol tid,
float4col real,
float8col double precision,
macaddrcol macaddr,
inetcol inet,
cidrcol cidr,
bpcharcol character,
datecol date,
timecol time without time zone,
timestampcol timestamp without time zone,
timestamptzcol timestamp with time zone,
intervalcol interval,
timetzcol time with time zone,
bitcol bit(10),
varbitcol bit varying(16),
numericcol numeric,
uuidcol uuid,
int4rangecol int4range,
lsncol pg_lsn,
boxcol box
) WITH (fillfactor=10, autovacuum_enabled=off);
INSERT INTO brintest SELECT
repeat(stringu1, 8)::bytea,
substr(stringu1, 1, 1)::"char",
stringu1::name, 142857 * tenthous,
thousand,
twothousand,
repeat(stringu1, 8),
unique1::oid,
format('(%s,%s)', tenthous, twenty)::tid,
(four + 1.0)/(hundred+1),
odd::float8 / (tenthous + 1),
format('%s:00:%s:00:%s:00', to_hex(odd), to_hex(even), to_hex(hundred))::macaddr,
inet '10.2.3.4/24' + tenthous,
cidr '10.2.3/24' + tenthous,
substr(stringu1, 1, 1)::bpchar,
date '1995-08-15' + tenthous,
time '01:20:30' + thousand * interval '18.5 second',
timestamp '1942-07-23 03:05:09' + tenthous * interval '36.38 hours',
timestamptz '1972-10-10 03:00' + thousand * interval '1 hour',
justify_days(justify_hours(tenthous * interval '12 minutes')),
timetz '01:30:20+02' + hundred * interval '15 seconds',
thousand::bit(10),
tenthous::bit(16)::varbit,
tenthous::numeric(36,30) * fivethous * even / (hundred + 1),
format('%s%s-%s-%s-%s-%s%s%s', to_char(tenthous, 'FM0000'), to_char(tenthous, 'FM0000'), to_char(tenthous, 'FM0000'), to_char(tenthous, 'FM0000'), to_char(tenthous, 'FM0000'), to_char(tenthous, 'FM0000'), to_char(tenthous, 'FM0000'), to_char(tenthous, 'FM0000'))::uuid,
int4range(thousand, twothousand),
format('%s/%s%s', odd, even, tenthous)::pg_lsn,
box(point(odd, even), point(thousand, twothousand))
FROM tenk1 ORDER BY unique2 LIMIT 100;
-- throw in some NULL's and different values
INSERT INTO brintest (inetcol, cidrcol, int4rangecol) SELECT
inet 'fe80::6e40:8ff:fea9:8c46' + tenthous,
cidr 'fe80::6e40:8ff:fea9:8c46' + tenthous,
'empty'::int4range
FROM tenk1 ORDER BY thousand, tenthous LIMIT 25;
CREATE INDEX brinidx ON brintest USING brin (
byteacol,
charcol,
namecol,
int8col,
int2col,
int4col,
textcol,
oidcol,
tidcol,
float4col,
float8col,
macaddrcol,
inetcol inet_inclusion_ops,
inetcol inet_minmax_ops,
cidrcol inet_inclusion_ops,
cidrcol inet_minmax_ops,
bpcharcol,
datecol,
timecol,
timestampcol,
timestamptzcol,
intervalcol,
timetzcol,
bitcol,
varbitcol,
numericcol,
uuidcol,
int4rangecol,
lsncol,
boxcol
) with (pages_per_range = 1);
CREATE TABLE brinopers (colname name, typ text,
op text[], value text[], matches int[],
check (cardinality(op) = cardinality(value)),
check (cardinality(op) = cardinality(matches)));
INSERT INTO brinopers VALUES
('byteacol', 'bytea',
'{>, >=, =, <=, <}',
'{AAAAAA, AAAAAA, BNAAAABNAAAABNAAAABNAAAABNAAAABNAAAABNAAAABNAAAA, ZZZZZZ, ZZZZZZ}',
'{100, 100, 1, 100, 100}'),
('charcol', '"char"',
'{>, >=, =, <=, <}',
'{A, A, M, Z, Z}',
'{97, 100, 6, 100, 98}'),
('namecol', 'name',
'{>, >=, =, <=, <}',
'{AAAAAA, AAAAAA, MAAAAA, ZZAAAA, ZZAAAA}',
'{100, 100, 2, 100, 100}'),
('int2col', 'int2',
'{>, >=, =, <=, <}',
'{0, 0, 800, 999, 999}',
'{100, 100, 1, 100, 100}'),
('int2col', 'int4',
'{>, >=, =, <=, <}',
'{0, 0, 800, 999, 1999}',
'{100, 100, 1, 100, 100}'),
('int2col', 'int8',
'{>, >=, =, <=, <}',
'{0, 0, 800, 999, 1428427143}',
'{100, 100, 1, 100, 100}'),
('int4col', 'int2',
'{>, >=, =, <=, <}',
'{0, 0, 800, 1999, 1999}',
'{100, 100, 1, 100, 100}'),
('int4col', 'int4',
'{>, >=, =, <=, <}',
'{0, 0, 800, 1999, 1999}',
'{100, 100, 1, 100, 100}'),
('int4col', 'int8',
'{>, >=, =, <=, <}',
'{0, 0, 800, 1999, 1428427143}',
'{100, 100, 1, 100, 100}'),
('int8col', 'int2',
'{>, >=}',
'{0, 0}',
'{100, 100}'),
('int8col', 'int4',
'{>, >=}',
'{0, 0}',
'{100, 100}'),
('int8col', 'int8',
'{>, >=, =, <=, <}',
'{0, 0, 1257141600, 1428427143, 1428427143}',
'{100, 100, 1, 100, 100}'),
('textcol', 'text',
'{>, >=, =, <=, <}',
'{ABABAB, ABABAB, BNAAAABNAAAABNAAAABNAAAABNAAAABNAAAABNAAAABNAAAA, ZZAAAA, ZZAAAA}',
'{100, 100, 1, 100, 100}'),
('oidcol', 'oid',
'{>, >=, =, <=, <}',
'{0, 0, 8800, 9999, 9999}',
'{100, 100, 1, 100, 100}'),
('tidcol', 'tid',
'{>, >=, =, <=, <}',
'{"(0,0)", "(0,0)", "(8800,0)", "(9999,19)", "(9999,19)"}',
'{100, 100, 1, 100, 100}'),
('float4col', 'float4',
'{>, >=, =, <=, <}',
'{0.0103093, 0.0103093, 1, 1, 1}',
'{100, 100, 4, 100, 96}'),
('float4col', 'float8',
'{>, >=, =, <=, <}',
'{0.0103093, 0.0103093, 1, 1, 1}',
'{100, 100, 4, 100, 96}'),
('float8col', 'float4',
'{>, >=, =, <=, <}',
'{0, 0, 0, 1.98, 1.98}',
'{99, 100, 1, 100, 100}'),
('float8col', 'float8',
'{>, >=, =, <=, <}',
'{0, 0, 0, 1.98, 1.98}',
'{99, 100, 1, 100, 100}'),
('macaddrcol', 'macaddr',
'{>, >=, =, <=, <}',
'{00:00:01:00:00:00, 00:00:01:00:00:00, 2c:00:2d:00:16:00, ff:fe:00:00:00:00, ff:fe:00:00:00:00}',
'{99, 100, 2, 100, 100}'),
('inetcol', 'inet',
'{&&, =, <, <=, >, >=, >>=, >>, <<=, <<}',
'{10/8, 10.2.14.231/24, 255.255.255.255, 255.255.255.255, 0.0.0.0, 0.0.0.0, 10.2.14.231/24, 10.2.14.231/25, 10.2.14.231/8, 0/0}',
'{100, 1, 100, 100, 125, 125, 2, 2, 100, 100}'),
('inetcol', 'inet',
'{&&, >>=, <<=, =}',
'{fe80::6e40:8ff:fea9:a673/32, fe80::6e40:8ff:fea9:8c46, fe80::6e40:8ff:fea9:a673/32, fe80::6e40:8ff:fea9:8c46}',
'{25, 1, 25, 1}'),
('inetcol', 'cidr',
'{&&, <, <=, >, >=, >>=, >>, <<=, <<}',
'{10/8, 255.255.255.255, 255.255.255.255, 0.0.0.0, 0.0.0.0, 10.2.14/24, 10.2.14/25, 10/8, 0/0}',
'{100, 100, 100, 125, 125, 2, 2, 100, 100}'),
('inetcol', 'cidr',
'{&&, >>=, <<=, =}',
'{fe80::/32, fe80::6e40:8ff:fea9:8c46, fe80::/32, fe80::6e40:8ff:fea9:8c46}',
'{25, 1, 25, 1}'),
('cidrcol', 'inet',
'{&&, =, <, <=, >, >=, >>=, >>, <<=, <<}',
'{10/8, 10.2.14/24, 255.255.255.255, 255.255.255.255, 0.0.0.0, 0.0.0.0, 10.2.14.231/24, 10.2.14.231/25, 10.2.14.231/8, 0/0}',
'{100, 2, 100, 100, 125, 125, 2, 2, 100, 100}'),
('cidrcol', 'inet',
'{&&, >>=, <<=, =}',
'{fe80::6e40:8ff:fea9:a673/32, fe80::6e40:8ff:fea9:8c46, fe80::6e40:8ff:fea9:a673/32, fe80::6e40:8ff:fea9:8c46}',
'{25, 1, 25, 1}'),
('cidrcol', 'cidr',
'{&&, =, <, <=, >, >=, >>=, >>, <<=, <<}',
'{10/8, 10.2.14/24, 255.255.255.255, 255.255.255.255, 0.0.0.0, 0.0.0.0, 10.2.14/24, 10.2.14/25, 10/8, 0/0}',
'{100, 2, 100, 100, 125, 125, 2, 2, 100, 100}'),
('cidrcol', 'cidr',
'{&&, >>=, <<=, =}',
'{fe80::/32, fe80::6e40:8ff:fea9:8c46, fe80::/32, fe80::6e40:8ff:fea9:8c46}',
'{25, 1, 25, 1}'),
('bpcharcol', 'bpchar',
'{>, >=, =, <=, <}',
'{A, A, W, Z, Z}',
'{97, 100, 6, 100, 98}'),
('datecol', 'date',
'{>, >=, =, <=, <}',
'{1995-08-15, 1995-08-15, 2009-12-01, 2022-12-30, 2022-12-30}',
'{100, 100, 1, 100, 100}'),
('timecol', 'time',
'{>, >=, =, <=, <}',
'{01:20:30, 01:20:30, 02:28:57, 06:28:31.5, 06:28:31.5}',
'{100, 100, 1, 100, 100}'),
('timestampcol', 'timestamp',
'{>, >=, =, <=, <}',
'{1942-07-23 03:05:09, 1942-07-23 03:05:09, 1964-03-24 19:26:45, 1984-01-20 22:42:21, 1984-01-20 22:42:21}',
'{100, 100, 1, 100, 100}'),
('timestampcol', 'timestamptz',
'{>, >=, =, <=, <}',
'{1942-07-23 03:05:09, 1942-07-23 03:05:09, 1964-03-24 19:26:45, 1984-01-20 22:42:21, 1984-01-20 22:42:21}',
'{100, 100, 1, 100, 100}'),
('timestamptzcol', 'timestamptz',
'{>, >=, =, <=, <}',
'{1972-10-10 03:00:00-04, 1972-10-10 03:00:00-04, 1972-10-19 09:00:00-07, 1972-11-20 19:00:00-03, 1972-11-20 19:00:00-03}',
'{100, 100, 1, 100, 100}'),
('intervalcol', 'interval',
'{>, >=, =, <=, <}',
'{00:00:00, 00:00:00, 1 mons 13 days 12:24, 2 mons 23 days 07:48:00, 1 year}',
'{100, 100, 1, 100, 100}'),
('timetzcol', 'timetz',
'{>, >=, =, <=, <}',
'{01:30:20+02, 01:30:20+02, 01:35:50+02, 23:55:05+02, 23:55:05+02}',
'{99, 100, 2, 100, 100}'),
('bitcol', 'bit(10)',
'{>, >=, =, <=, <}',
'{0000000010, 0000000010, 0011011110, 1111111000, 1111111000}',
'{100, 100, 1, 100, 100}'),
('varbitcol', 'varbit(16)',
'{>, >=, =, <=, <}',
'{0000000000000100, 0000000000000100, 0001010001100110, 1111111111111000, 1111111111111000}',
'{100, 100, 1, 100, 100}'),
('numericcol', 'numeric',
'{>, >=, =, <=, <}',
'{0.00, 0.01, 2268164.347826086956521739130434782609, 99470151.9, 99470151.9}',
'{100, 100, 1, 100, 100}'),
('uuidcol', 'uuid',
'{>, >=, =, <=, <}',
'{00040004-0004-0004-0004-000400040004, 00040004-0004-0004-0004-000400040004, 52225222-5222-5222-5222-522252225222, 99989998-9998-9998-9998-999899989998, 99989998-9998-9998-9998-999899989998}',
'{100, 100, 1, 100, 100}'),
('int4rangecol', 'int4range',
'{<<, &<, &&, &>, >>, @>, <@, =, <, <=, >, >=}',
'{"[10000,)","[10000,)","(,]","[3,4)","[36,44)","(1500,1501]","[3,4)","[222,1222)","[36,44)","[43,1043)","[367,4466)","[519,)"}',
'{53, 53, 53, 53, 50, 22, 72, 1, 74, 75, 34, 21}'),
('int4rangecol', 'int4range',
'{@>, <@, =, <=, >, >=}',
'{empty, empty, empty, empty, empty, empty}',
'{125, 72, 72, 72, 53, 125}'),
('int4rangecol', 'int4',
'{@>}',
'{1500}',
'{22}'),
('lsncol', 'pg_lsn',
'{>, >=, =, <=, <, IS, IS NOT}',
'{0/1200, 0/1200, 44/455222, 198/1999799, 198/1999799, NULL, NULL}',
'{100, 100, 1, 100, 100, 25, 100}'),
('boxcol', 'point',
'{@>}',
'{"(500,43)"}',
'{11}'),
('boxcol', 'box',
'{<<, &<, &&, &>, >>, <<|, &<|, |&>, |>>, @>, <@, ~=}',
'{"((1000,2000),(3000,4000))","((1,2),(3000,4000))","((1,2),(3000,4000))","((1,2),(3000,4000))","((1,2),(3,4))","((1000,2000),(3000,4000))","((1,2000),(3,4000))","((1000,2),(3000,4))","((1,2),(3,4))","((1,2),(300,400))","((1,2),(3000,4000))","((222,1222),(44,45))"}',
'{100, 100, 100, 99, 96, 100, 100, 99, 96, 1, 99, 1}');
DO $x$
DECLARE
r record;
r2 record;
cond text;
idx_ctids tid[];
ss_ctids tid[];
count int;
plan_ok bool;
plan_line text;
BEGIN
FOR r IN SELECT colname, oper, typ, value[ordinality], matches[ordinality] FROM brinopers, unnest(op) WITH ORDINALITY AS oper LOOP
-- prepare the condition
IF r.value IS NULL THEN
cond := format('%I %s %L', r.colname, r.oper, r.value);
ELSE
cond := format('%I %s %L::%s', r.colname, r.oper, r.value, r.typ);
END IF;
-- run the query using the brin index
SET enable_seqscan = 0;
SET enable_bitmapscan = 1;
plan_ok := false;
FOR plan_line IN EXECUTE format($y$EXPLAIN SELECT array_agg(ctid) FROM brintest WHERE %s $y$, cond) LOOP
IF plan_line LIKE '%Bitmap Heap Scan on brintest%' THEN
plan_ok := true;
END IF;
END LOOP;
IF NOT plan_ok THEN
RAISE WARNING 'did not get bitmap indexscan plan for %', r;
END IF;
EXECUTE format($y$SELECT array_agg(ctid) FROM brintest WHERE %s $y$, cond)
INTO idx_ctids;
-- run the query using a seqscan
SET enable_seqscan = 1;
SET enable_bitmapscan = 0;
plan_ok := false;
FOR plan_line IN EXECUTE format($y$EXPLAIN SELECT array_agg(ctid) FROM brintest WHERE %s $y$, cond) LOOP
IF plan_line LIKE '%Seq Scan on brintest%' THEN
plan_ok := true;
END IF;
END LOOP;
IF NOT plan_ok THEN
RAISE WARNING 'did not get seqscan plan for %', r;
END IF;
EXECUTE format($y$SELECT array_agg(ctid) FROM brintest WHERE %s $y$, cond)
INTO ss_ctids;
-- make sure both return the same results
count := array_length(idx_ctids, 1);
IF NOT (count = array_length(ss_ctids, 1) AND
idx_ctids @> ss_ctids AND
idx_ctids <@ ss_ctids) THEN
-- report the results of each scan to make the differences obvious
RAISE WARNING 'something not right in %: count %', r, count;
SET enable_seqscan = 1;
SET enable_bitmapscan = 0;
FOR r2 IN EXECUTE 'SELECT ' || r.colname || ' FROM brintest WHERE ' || cond LOOP
RAISE NOTICE 'seqscan: %', r2;
END LOOP;
SET enable_seqscan = 0;
SET enable_bitmapscan = 1;
FOR r2 IN EXECUTE 'SELECT ' || r.colname || ' FROM brintest WHERE ' || cond LOOP
RAISE NOTICE 'bitmapscan: %', r2;
END LOOP;
END IF;
-- make sure we found expected number of matches
IF count != r.matches THEN RAISE WARNING 'unexpected number of results % for %', count, r; END IF;
END LOOP;
END;
$x$;
RESET enable_seqscan;
RESET enable_bitmapscan;
-- Insert a second batch of 5 rows (OFFSET 5) so brinidx acquires pages that
-- are not yet summarized; the expressions mirror the table's column types.
INSERT INTO brintest SELECT
repeat(stringu1, 42)::bytea,
substr(stringu1, 1, 1)::"char",
stringu1::name, 142857 * tenthous,
thousand,
twothousand,
repeat(stringu1, 42),
unique1::oid,
format('(%s,%s)', tenthous, twenty)::tid,
(four + 1.0)/(hundred+1),
odd::float8 / (tenthous + 1),
format('%s:00:%s:00:%s:00', to_hex(odd), to_hex(even), to_hex(hundred))::macaddr,
inet '10.2.3.4' + tenthous,
cidr '10.2.3/24' + tenthous,
substr(stringu1, 1, 1)::bpchar,
date '1995-08-15' + tenthous,
time '01:20:30' + thousand * interval '18.5 second',
timestamp '1942-07-23 03:05:09' + tenthous * interval '36.38 hours',
timestamptz '1972-10-10 03:00' + thousand * interval '1 hour',
justify_days(justify_hours(tenthous * interval '12 minutes')),
timetz '01:30:20' + hundred * interval '15 seconds',
thousand::bit(10),
tenthous::bit(16)::varbit,
tenthous::numeric(36,30) * fivethous * even / (hundred + 1),
format('%s%s-%s-%s-%s-%s%s%s', to_char(tenthous, 'FM0000'), to_char(tenthous, 'FM0000'), to_char(tenthous, 'FM0000'), to_char(tenthous, 'FM0000'), to_char(tenthous, 'FM0000'), to_char(tenthous, 'FM0000'), to_char(tenthous, 'FM0000'), to_char(tenthous, 'FM0000'))::uuid,
int4range(thousand, twothousand),
format('%s/%s%s', odd, even, tenthous)::pg_lsn,
box(point(odd, even), point(thousand, twothousand))
FROM tenk1 ORDER BY unique2 LIMIT 5 OFFSET 5;
-- Desummarize range 0 so the following VACUUM has to rebuild its summary.
SELECT brin_desummarize_range('brinidx', 0);
VACUUM brintest; -- force a summarization cycle in brinidx
UPDATE brintest SET int8col = int8col * int4col;
UPDATE brintest SET textcol = '' WHERE textcol IS NOT NULL;
-- Tests for brin_summarize_new_values
SELECT brin_summarize_new_values('brintest'); -- error, not an index
SELECT brin_summarize_new_values('tenk1_unique1'); -- error, not a BRIN index
SELECT brin_summarize_new_values('brinidx'); -- ok, no change expected
-- Tests for brin_desummarize_range
SELECT brin_desummarize_range('brinidx', -1); -- error, invalid range
SELECT brin_desummarize_range('brinidx', 0);
-- desummarizing an already-desummarized range is a no-op, not an error
SELECT brin_desummarize_range('brinidx', 0);
-- a block number beyond the end of the index is accepted silently
SELECT brin_desummarize_range('brinidx', 100000000);
-- Test brin_summarize_range
-- fillfactor=10 spreads the rows across pages; autovacuum is disabled so
-- no background summarization interferes with the explicit calls below.
CREATE TABLE brin_summarize (
value int
) WITH (fillfactor=10, autovacuum_enabled=false);
CREATE INDEX brin_summarize_idx ON brin_summarize USING brin (value) WITH (pages_per_range=2);
-- Fill a few pages
-- Keep inserting until a row lands past block 2, guaranteeing at least two
-- complete 2-page ranges exist.
DO $$
DECLARE curtid tid;
BEGIN
LOOP
INSERT INTO brin_summarize VALUES (1) RETURNING ctid INTO curtid;
EXIT WHEN curtid > tid '(2, 0)';
END LOOP;
END;
$$;
-- summarize one range
SELECT brin_summarize_range('brin_summarize_idx', 0);
-- nothing: already summarized
SELECT brin_summarize_range('brin_summarize_idx', 1);
-- summarize one range
SELECT brin_summarize_range('brin_summarize_idx', 2);
-- nothing: page doesn't exist in table
SELECT brin_summarize_range('brin_summarize_idx', 4294967295);
-- invalid block number values
SELECT brin_summarize_range('brin_summarize_idx', -1);
SELECT brin_summarize_range('brin_summarize_idx', 4294967296);
-- test brin cost estimates behave sanely based on correlation of values
-- column a is monotonically increasing (perfectly correlated with heap
-- order); column b cycles 0..99 (uncorrelated).
CREATE TABLE brin_test (a INT, b INT);
INSERT INTO brin_test SELECT x/100,x%100 FROM generate_series(1,10000) x(x);
CREATE INDEX brin_test_a_idx ON brin_test USING brin (a) WITH (pages_per_range = 2);
CREATE INDEX brin_test_b_idx ON brin_test USING brin (b) WITH (pages_per_range = 2);
VACUUM ANALYZE brin_test;
-- Ensure brin index is used when columns are perfectly correlated
EXPLAIN (COSTS OFF) SELECT * FROM brin_test WHERE a = 1;
-- Ensure brin index is not used when values are not correlated
EXPLAIN (COSTS OFF) SELECT * FROM brin_test WHERE b = 1;
-- make sure data are properly de-toasted in BRIN index
CREATE TABLE brintest_3 (a text, b text, c text, d text);
-- long random strings (~2000 chars each, so ~6kB for min/max on two
-- columns) to trigger toasting
WITH rand_value AS (SELECT string_agg(md5(i::text),'') AS val FROM generate_series(1,60) s(i))
INSERT INTO brintest_3
SELECT val, val, val, val FROM rand_value;
CREATE INDEX brin_test_toast_idx ON brintest_3 USING brin (b, c);
DELETE FROM brintest_3;
-- We need to wait a bit for all transactions to complete, so that the
-- vacuum actually removes the TOAST rows. Creating an index concurrently
-- is a one way to achieve that, because it does exactly such wait.
CREATE INDEX CONCURRENTLY brin_test_temp_idx ON brintest_3(a);
DROP INDEX brin_test_temp_idx;
-- vacuum the table, to discard TOAST data
VACUUM brintest_3;
-- retry insert with a different random-looking (but deterministic) value
-- the value is different, and so should replace either min or max in the
-- brin summary
WITH rand_value AS (SELECT string_agg(md5((-i)::text),'') AS val FROM generate_series(1,60) s(i))
INSERT INTO brintest_3
SELECT val, val, val, val FROM rand_value;
-- now try some queries, accessing the brin index
-- If the summary still referenced the vacuumed-away TOAST value, this scan
-- would fail to de-toast it.
SET enable_seqscan = off;
EXPLAIN (COSTS OFF)
SELECT * FROM brintest_3 WHERE b < '0';
SELECT * FROM brintest_3 WHERE b < '0';
DROP TABLE brintest_3;
RESET enable_seqscan;
-- metasfresh Application Dictionary migration log, auto-generated by the
-- System Configurator. Each statement is stamped with its capture time;
-- replay the statements in this exact order and do not merge or reorder them.
-- 2017-09-02T16:58:53.909
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field SET Name='Periode Nr.',Updated=TO_TIMESTAMP('2017-09-02 16:58:53','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Field_ID=558771
;
-- 2017-09-02T16:59:00.186
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field SET Name='Perioden Art',Updated=TO_TIMESTAMP('2017-09-02 16:59:00','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Field_ID=558776
;
-- 2017-09-02T16:59:05.376
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field SET Name='Verarbeiten',Updated=TO_TIMESTAMP('2017-09-02 16:59:05','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Field_ID=558777
;
-- 2017-09-02T16:59:54.859
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET WidgetSize='S',Updated=TO_TIMESTAMP('2017-09-02 16:59:54','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546072
;
-- 2017-09-02T16:59:59.066
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET WidgetSize='S',Updated=TO_TIMESTAMP('2017-09-02 16:59:59','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546069
;
-- 2017-09-02T17:00:55.722
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET WidgetSize='M',Updated=TO_TIMESTAMP('2017-09-02 17:00:55','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546070
;
-- 2017-09-02T17:01:05.699
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET SeqNo=20,Updated=TO_TIMESTAMP('2017-09-02 17:01:05','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546072
;
-- 2017-09-02T17:01:09.443
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET SeqNo=30,Updated=TO_TIMESTAMP('2017-09-02 17:01:09','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546070
;
-- 2017-09-02T17:01:12.352
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET SeqNo=40,Updated=TO_TIMESTAMP('2017-09-02 17:01:12','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546071
;
-- 2017-09-02T17:01:48.048
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_ElementGroup SET SeqNo=20,Updated=TO_TIMESTAMP('2017-09-02 17:01:48','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_ElementGroup_ID=540757
;
-- 2017-09-02T17:01:50.761
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_ElementGroup SET SeqNo=30,Updated=TO_TIMESTAMP('2017-09-02 17:01:50','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_ElementGroup_ID=540756
;
-- 2017-09-02T17:02:08.090
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_UI_Element WHERE AD_UI_Element_ID=546078
;
-- 2017-09-02T17:02:11.802
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_UI_ElementGroup WHERE AD_UI_ElementGroup_ID=540757
;
-- 2017-09-02T17:02:34.903
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET AD_UI_ElementGroup_ID=540755, SeqNo=20,Updated=TO_TIMESTAMP('2017-09-02 17:02:34','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546073
;
-- 2017-09-02T17:02:45.789
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET AD_UI_ElementGroup_ID=540755, SeqNo=30,Updated=TO_TIMESTAMP('2017-09-02 17:02:45','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546074
;
-- 2017-09-02T17:02:49.922
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_UI_ElementGroup WHERE AD_UI_ElementGroup_ID=540754
;
-- 2017-09-02T17:03:03.255
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_ElementGroup SET SeqNo=20,Updated=TO_TIMESTAMP('2017-09-02 17:03:03','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_ElementGroup_ID=540756
;
-- 2017-09-02T17:04:12.739
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=10,Updated=TO_TIMESTAMP('2017-09-02 17:04:12','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546072
;
-- 2017-09-02T17:04:12.741
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=20,Updated=TO_TIMESTAMP('2017-09-02 17:04:12','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546069
;
-- 2017-09-02T17:04:12.743
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=30,Updated=TO_TIMESTAMP('2017-09-02 17:04:12','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546070
;
-- 2017-09-02T17:04:12.745
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=40,Updated=TO_TIMESTAMP('2017-09-02 17:04:12','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546071
;
-- 2017-09-02T17:04:12.747
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=50,Updated=TO_TIMESTAMP('2017-09-02 17:04:12','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546075
;
-- 2017-09-02T17:04:12.752
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=60,Updated=TO_TIMESTAMP('2017-09-02 17:04:12','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546073
;
-- 2017-09-02T17:04:12.754
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=70,Updated=TO_TIMESTAMP('2017-09-02 17:04:12','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546074
;
-- 2017-09-02T17:04:12.756
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=80,Updated=TO_TIMESTAMP('2017-09-02 17:04:12','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546076
;
-- 2017-09-02T17:04:34.699
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayed_SideList='Y', SeqNo_SideList=10,Updated=TO_TIMESTAMP('2017-09-02 17:04:34','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546072
;
-- 2017-09-02T17:04:34.704
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayed_SideList='Y', SeqNo_SideList=20,Updated=TO_TIMESTAMP('2017-09-02 17:04:34','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546069
;
-- 2017-09-02T17:04:34.708
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayed_SideList='Y', SeqNo_SideList=30,Updated=TO_TIMESTAMP('2017-09-02 17:04:34','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546070
;
-- 2017-09-02T17:04:34.713
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayed_SideList='Y', SeqNo_SideList=40,Updated=TO_TIMESTAMP('2017-09-02 17:04:34','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546074
;
-- 2017-09-02T17:04:34.717
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayed_SideList='Y', SeqNo_SideList=50,Updated=TO_TIMESTAMP('2017-09-02 17:04:34','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546076
;
-- 2017-09-02T17:05:08.582
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field SET Name='Periode Status',Updated=TO_TIMESTAMP('2017-09-02 17:05:08','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Field_ID=558784
;
-- 2017-09-02T17:05:14.509
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field SET Name='Perioden Aktion',Updated=TO_TIMESTAMP('2017-09-02 17:05:14','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Field_ID=558785
;
-- 2017-09-02T17:05:19.943
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field SET Name='Verarbeiten',Updated=TO_TIMESTAMP('2017-09-02 17:05:19','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Field_ID=558786
;
-- 2017-09-02T17:07:13.103
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
-- NOTE(review): SortNo=-1 below appears intentional (removes the field from
-- the default sort order) -- confirm against the System Configurator session.
UPDATE AD_Field SET SortNo=-1.000000000000,Updated=TO_TIMESTAMP('2017-09-02 17:07:13','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Field_ID=558770
;
-- 2017-09-02T17:07:18.567
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field SET SortNo=2.000000000000,Updated=TO_TIMESTAMP('2017-09-02 17:07:18','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Field_ID=558771
;
-- 2017-09-02T17:07:59.162
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET WidgetSize='M',Updated=TO_TIMESTAMP('2017-09-02 17:07:59','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546071
;
-- 2017-09-02T17:12:49.233
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field_Trl SET UpdatedBy=100,Updated=TO_TIMESTAMP('2017-09-02 17:12:49','YYYY-MM-DD HH24:MI:SS'),IsTranslated='Y',Name='Client',Description='',Help='' WHERE AD_Field_ID=558768 AND AD_Language='en_US'
;
-- 2017-09-02T17:13:03.681
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field_Trl SET UpdatedBy=100,Updated=TO_TIMESTAMP('2017-09-02 17:13:03','YYYY-MM-DD HH24:MI:SS'),IsTranslated='Y',Name='Organisation',Description='',Help='' WHERE AD_Field_ID=558769 AND AD_Language='nl_NL'
;
-- 2017-09-02T17:13:20.144
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
-- NOTE(review): this reverts the nl_NL entry just set above back to
-- untranslated with a German caption -- presumably a correction made in the
-- same session; confirm intent before consolidating.
UPDATE AD_Field_Trl SET UpdatedBy=100,Updated=TO_TIMESTAMP('2017-09-02 17:13:20','YYYY-MM-DD HH24:MI:SS'),IsTranslated='N',Name='Sektion' WHERE AD_Field_ID=558769 AND AD_Language='nl_NL'
;
-- 2017-09-02T17:13:38.257
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field_Trl SET UpdatedBy=100,Updated=TO_TIMESTAMP('2017-09-02 17:13:38','YYYY-MM-DD HH24:MI:SS'),IsTranslated='Y',Name='Organisation',Description='',Help='' WHERE AD_Field_ID=558769 AND AD_Language='en_US'
;
-- 2017-09-02T17:13:54.659
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field_Trl SET UpdatedBy=100,Updated=TO_TIMESTAMP('2017-09-02 17:13:54','YYYY-MM-DD HH24:MI:SS'),IsTranslated='Y',Name='Year',Description='',Help='' WHERE AD_Field_ID=558770 AND AD_Language='en_US'
;
-- 2017-09-02T17:14:11.321
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field_Trl SET UpdatedBy=100,Updated=TO_TIMESTAMP('2017-09-02 17:14:11','YYYY-MM-DD HH24:MI:SS'),Name='Period No.' WHERE AD_Field_ID=558771 AND AD_Language='en_US'
;
-- 2017-09-02T17:14:18.514
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field_Trl SET UpdatedBy=100,Updated=TO_TIMESTAMP('2017-09-02 17:14:18','YYYY-MM-DD HH24:MI:SS'),IsTranslated='Y' WHERE AD_Field_ID=558772 AND AD_Language='en_US'
;
-- 2017-09-02T17:14:31.071
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field_Trl SET UpdatedBy=100,Updated=TO_TIMESTAMP('2017-09-02 17:14:31','YYYY-MM-DD HH24:MI:SS'),IsTranslated='Y',Name='Active',Description='',Help='' WHERE AD_Field_ID=558773 AND AD_Language='en_US'
;
-- 2017-09-02T17:14:46.810
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field_Trl SET UpdatedBy=100,Updated=TO_TIMESTAMP('2017-09-02 17:14:46','YYYY-MM-DD HH24:MI:SS'),IsTranslated='Y',Name='Start Date' WHERE AD_Field_ID=558774 AND AD_Language='en_US'
;
-- 2017-09-02T17:14:56.698
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field_Trl SET UpdatedBy=100,Updated=TO_TIMESTAMP('2017-09-02 17:14:56','YYYY-MM-DD HH24:MI:SS'),IsTranslated='Y',Name='End Date' WHERE AD_Field_ID=558775 AND AD_Language='en_US'
;
-- 2017-09-02T17:15:04.051
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field_Trl SET UpdatedBy=100,Updated=TO_TIMESTAMP('2017-09-02 17:15:04','YYYY-MM-DD HH24:MI:SS'),IsTranslated='Y' WHERE AD_Field_ID=558776 AND AD_Language='en_US'
;
-- 2017-09-02T17:15:10.027
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field_Trl SET UpdatedBy=100,Updated=TO_TIMESTAMP('2017-09-02 17:15:10','YYYY-MM-DD HH24:MI:SS'),IsTranslated='Y' WHERE AD_Field_ID=558777 AND AD_Language='en_US'
;
-- 2017-09-02T17:15:22.855
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field_Trl SET UpdatedBy=100,Updated=TO_TIMESTAMP('2017-09-02 17:15:22','YYYY-MM-DD HH24:MI:SS'),IsTranslated='Y',Name='Period',Description='',Help='' WHERE AD_Field_ID=558767 AND AD_Language='en_US'
;
-- 2017-09-02T17:15:39.528
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field_Trl SET UpdatedBy=100,Updated=TO_TIMESTAMP('2017-09-02 17:15:39','YYYY-MM-DD HH24:MI:SS'),IsTranslated='Y',Name='Client',Description='',Help='' WHERE AD_Field_ID=558779 AND AD_Language='en_US'
;
-- 2017-09-02T17:16:01.724
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field_Trl SET UpdatedBy=100,Updated=TO_TIMESTAMP('2017-09-02 17:16:01','YYYY-MM-DD HH24:MI:SS'),IsTranslated='Y',Name='Organisation',Description='',Help='' WHERE AD_Field_ID=558780 AND AD_Language='en_US'
;
-- 2017-09-02T17:16:12.115
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field_Trl SET UpdatedBy=100,Updated=TO_TIMESTAMP('2017-09-02 17:16:12','YYYY-MM-DD HH24:MI:SS'),IsTranslated='Y',Name='Period',Description='',Help='' WHERE AD_Field_ID=558781 AND AD_Language='en_US'
;
-- 2017-09-02T17:16:50.052
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field_Trl SET UpdatedBy=100,Updated=TO_TIMESTAMP('2017-09-02 17:16:50','YYYY-MM-DD HH24:MI:SS'),IsTranslated='Y',Name='Active',Description='',Help='' WHERE AD_Field_ID=558783 AND AD_Language='en_US'
;
-- 2017-09-02T17:17:05.132
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field_Trl SET UpdatedBy=100,Updated=TO_TIMESTAMP('2017-09-02 17:17:05','YYYY-MM-DD HH24:MI:SS'),IsTranslated='Y' WHERE AD_Field_ID=558784 AND AD_Language='en_US'
;
-- 2017-09-02T17:17:35.429
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field_Trl SET UpdatedBy=100,Updated=TO_TIMESTAMP('2017-09-02 17:17:35','YYYY-MM-DD HH24:MI:SS'),IsTranslated='Y' WHERE AD_Field_ID=558785 AND AD_Language='en_US'
;
-- 2017-09-02T17:17:55.990
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field_Trl SET UpdatedBy=100,Updated=TO_TIMESTAMP('2017-09-02 17:17:55','YYYY-MM-DD HH24:MI:SS'),IsTranslated='Y' WHERE AD_Field_ID=558786 AND AD_Language='en_US'
;
-- 2017-09-02T17:18:14.430
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field_Trl SET UpdatedBy=100,Updated=TO_TIMESTAMP('2017-09-02 17:18:14','YYYY-MM-DD HH24:MI:SS'),IsTranslated='Y',Name='Periodcontrol' WHERE AD_Field_ID=558778 AND AD_Language='en_US'
;
-- 2017-09-02T17:21:44.183
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='N', SeqNoGrid=0,Updated=TO_TIMESTAMP('2017-09-02 17:21:44','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546068
;
-- 2017-09-02T17:21:44.196
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=50,Updated=TO_TIMESTAMP('2017-09-02 17:21:44','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546062
;
-- 2017-09-02T17:22:05.076
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET SeqNo=40,Updated=TO_TIMESTAMP('2017-09-02 17:22:05','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546067
;
-- 2017-09-02T17:22:09.635
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET SeqNo=50,Updated=TO_TIMESTAMP('2017-09-02 17:22:09','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546062
;
-- 2017-09-02T17:22:12.245
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET SeqNo=60,Updated=TO_TIMESTAMP('2017-09-02 17:22:12','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546061
;
-- 2017-09-02T17:22:21.512
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayed='N', SeqNo=0,Updated=TO_TIMESTAMP('2017-09-02 17:22:21','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546068
;
-- 2017-09-02T17:22:55.781
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayed='Y',Updated=TO_TIMESTAMP('2017-09-02 17:22:55','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546063
;
-- 2017-09-02T17:23:33.398
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayed='N',Updated=TO_TIMESTAMP('2017-09-02 17:23:33','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=546063
;
-- Run the whole trigger test under SERIALIZABLE isolation.
SET SESSION CHARACTERISTICS AS TRANSACTION ISOLATION LEVEL SERIALIZABLE;
-- Test views on temporary tables.
-- main_table is the backing table; a temp view over it is created below.
create temp table main_table(a int, b int);
-- Generic logging trigger: reports which trigger fired (via its first
-- argument) together with the firing context, and never modifies the row.
CREATE OR REPLACE FUNCTION trigger_func() RETURNS trigger LANGUAGE plpgsql AS $body$
BEGIN
    RAISE NOTICE 'trigger_func(%) called: action = %, when = %, level = %',
        TG_ARGV[0], TG_OP, TG_WHEN, TG_LEVEL;
    RETURN NULL;
END;
$body$;
-- Column-specific triggers on main_table: each fires only when its listed
-- column(s) appear in the UPDATE's SET list.
CREATE TRIGGER before_upd_a_row_trig BEFORE UPDATE OF a ON main_table
FOR EACH ROW EXECUTE PROCEDURE trigger_func('before_upd_a_row');
CREATE TRIGGER after_upd_b_row_trig AFTER UPDATE OF b ON main_table
FOR EACH ROW EXECUTE PROCEDURE trigger_func('after_upd_b_row');
CREATE TRIGGER after_upd_a_b_row_trig AFTER UPDATE OF a, b ON main_table
FOR EACH ROW EXECUTE PROCEDURE trigger_func('after_upd_a_b_row')
CREATE TRIGGER before_upd_a_stmt_trig BEFORE UPDATE OF a ON main_table
FOR EACH STATEMENT EXECUTE PROCEDURE trigger_func('before_upd_a_stmt');
CREATE TRIGGER after_upd_b_stmt_trig AFTER UPDATE OF b ON main_table
FOR EACH STATEMENT EXECUTE PROCEDURE trigger_func('after_upd_b_stmt');
-- Updatable-via-trigger view over main_table (INSTEAD OF triggers below).
CREATE temp VIEW main_view AS SELECT a, b FROM main_table;
-- VIEW trigger function
-- Logs the firing context, then (for row-level INSTEAD OF triggers)
-- forwards the INSERT/UPDATE/DELETE to the backing main_table. Returns the
-- appropriate row when the DML touched the table, NULL otherwise.
CREATE OR REPLACE FUNCTION view_trigger() RETURNS trigger
LANGUAGE plpgsql AS $$
declare
    argstr text := '';
begin
    -- Assemble the trigger arguments as a comma-separated list.
    for i in 0 .. TG_nargs - 1 loop
        argstr := argstr
            || case when i > 0 then ', ' else '' end
            || TG_argv[i];
    end loop;
    raise notice '% % % % (%)', TG_RELNAME, TG_WHEN, TG_OP, TG_LEVEL, argstr;
    if TG_LEVEL = 'ROW' then
        if TG_OP = 'INSERT' then
            raise NOTICE 'NEW: %', NEW;
            INSERT INTO main_table VALUES (NEW.a, NEW.b);
            RETURN NEW;
        elsif TG_OP = 'UPDATE' then
            raise NOTICE 'OLD: %, NEW: %', OLD, NEW;
            UPDATE main_table SET a = NEW.a, b = NEW.b WHERE a = OLD.a AND b = OLD.b;
            -- Propagate the row only if the underlying UPDATE matched.
            if FOUND then
                RETURN NEW;
            end if;
            RETURN NULL;
        elsif TG_OP = 'DELETE' then
            raise NOTICE 'OLD: %', OLD;
            DELETE FROM main_table WHERE a = OLD.a AND b = OLD.b;
            -- Propagate the row only if the underlying DELETE matched.
            if FOUND then
                RETURN OLD;
            end if;
            RETURN NULL;
        end if;
    end if;
    RETURN NULL;
end;
$$;
-- Negative tests first: each invalid_trig creation below is expected to fail
-- with an error, so the same trigger name can be reused throughout.
-- Before row triggers aren't allowed on views
CREATE TRIGGER invalid_trig BEFORE INSERT ON main_view
FOR EACH ROW EXECUTE PROCEDURE trigger_func('before_ins_row');
CREATE TRIGGER invalid_trig BEFORE UPDATE ON main_view
FOR EACH ROW EXECUTE PROCEDURE trigger_func('before_upd_row');
CREATE TRIGGER invalid_trig BEFORE DELETE ON main_view
FOR EACH ROW EXECUTE PROCEDURE trigger_func('before_del_row');
-- After row triggers aren't allowed on views
-- (the 'before_*' argument strings are irrelevant here: creation fails
-- before the function could ever run)
CREATE TRIGGER invalid_trig AFTER INSERT ON main_view
FOR EACH ROW EXECUTE PROCEDURE trigger_func('before_ins_row');
CREATE TRIGGER invalid_trig AFTER UPDATE ON main_view
FOR EACH ROW EXECUTE PROCEDURE trigger_func('before_upd_row');
CREATE TRIGGER invalid_trig AFTER DELETE ON main_view
FOR EACH ROW EXECUTE PROCEDURE trigger_func('before_del_row');
-- Truncate triggers aren't allowed on views
CREATE TRIGGER invalid_trig BEFORE TRUNCATE ON main_view
EXECUTE PROCEDURE trigger_func('before_tru_row');
CREATE TRIGGER invalid_trig AFTER TRUNCATE ON main_view
EXECUTE PROCEDURE trigger_func('before_tru_row');
-- INSTEAD OF triggers aren't allowed on tables
CREATE TRIGGER invalid_trig INSTEAD OF INSERT ON main_table
FOR EACH ROW EXECUTE PROCEDURE view_trigger('instead_of_ins');
CREATE TRIGGER invalid_trig INSTEAD OF UPDATE ON main_table
FOR EACH ROW EXECUTE PROCEDURE view_trigger('instead_of_upd');
CREATE TRIGGER invalid_trig INSTEAD OF DELETE ON main_table
FOR EACH ROW EXECUTE PROCEDURE view_trigger('instead_of_del');
-- Don't support WHEN clauses with INSTEAD OF triggers
CREATE TRIGGER invalid_trig INSTEAD OF UPDATE ON main_view
FOR EACH ROW WHEN (OLD.a <> NEW.a) EXECUTE PROCEDURE view_trigger('instead_of_upd');
-- Don't support column-level INSTEAD OF triggers
CREATE TRIGGER invalid_trig INSTEAD OF UPDATE OF a ON main_view
FOR EACH ROW EXECUTE PROCEDURE view_trigger('instead_of_upd');
-- Don't support statement-level INSTEAD OF triggers
CREATE TRIGGER invalid_trig INSTEAD OF UPDATE ON main_view
EXECUTE PROCEDURE view_trigger('instead_of_upd');
-- Valid INSTEAD OF triggers
CREATE TRIGGER instead_of_insert_trig INSTEAD OF INSERT ON main_view
FOR EACH ROW EXECUTE PROCEDURE view_trigger('instead_of_ins');
CREATE TRIGGER instead_of_update_trig INSTEAD OF UPDATE ON main_view
FOR EACH ROW EXECUTE PROCEDURE view_trigger('instead_of_upd');
CREATE TRIGGER instead_of_delete_trig INSTEAD OF DELETE ON main_view
FOR EACH ROW EXECUTE PROCEDURE view_trigger('instead_of_del');
-- Valid BEFORE statement VIEW triggers
CREATE TRIGGER before_ins_stmt_trig BEFORE INSERT ON main_view
FOR EACH STATEMENT EXECUTE PROCEDURE view_trigger('before_view_ins_stmt');
CREATE TRIGGER before_upd_stmt_trig BEFORE UPDATE ON main_view
FOR EACH STATEMENT EXECUTE PROCEDURE view_trigger('before_view_upd_stmt');
CREATE TRIGGER before_del_stmt_trig BEFORE DELETE ON main_view
FOR EACH STATEMENT EXECUTE PROCEDURE view_trigger('before_view_del_stmt');
-- Valid AFTER statement VIEW triggers
CREATE TRIGGER after_ins_stmt_trig AFTER INSERT ON main_view
FOR EACH STATEMENT EXECUTE PROCEDURE view_trigger('after_view_ins_stmt');
CREATE TRIGGER after_upd_stmt_trig AFTER UPDATE ON main_view
FOR EACH STATEMENT EXECUTE PROCEDURE view_trigger('after_view_upd_stmt');
CREATE TRIGGER after_del_stmt_trig AFTER DELETE ON main_view
FOR EACH STATEMENT EXECUTE PROCEDURE view_trigger('after_view_del_stmt');
-- Show command tags for the DML below so trigger side effects are visible.
\set QUIET false
-- Insert into view using trigger
INSERT INTO main_view VALUES (20, 30);
INSERT INTO main_view VALUES (21, 31) RETURNING a, b;
-- Table trigger will prevent updates
UPDATE main_view SET b = 31 WHERE a = 20;
UPDATE main_view SET b = 32 WHERE a = 21 AND b = 31 RETURNING a, b;
-- Remove table trigger to allow updates
DROP TRIGGER before_upd_a_row_trig ON main_table;
UPDATE main_view SET b = 31 WHERE a = 20;
UPDATE main_view SET b = 32 WHERE a = 21 AND b = 31 RETURNING a, b;
SELECT * from main_view order by a, b;
-- Before and after stmt triggers should fire even when no rows are affected
UPDATE main_view SET b = 0 WHERE false;
-- Delete from view using trigger
DELETE FROM main_view WHERE a IN (10,20);
DELETE FROM main_view WHERE a = 31 RETURNING a, b;
SELECT * from main_view order by a, b;
\set QUIET true
-- Describe view should list triggers
\d main_view
-- Test dropping view triggers
DROP TRIGGER instead_of_insert_trig ON main_view;
DROP TRIGGER instead_of_delete_trig ON main_view;
\d main_view
-- Test alter (rename) triggers
ALTER TRIGGER after_ins_stmt_trig ON main_view RENAME TO after_ins_stmt_trig_new_name;
\d main_view
-- Clean up; dropping the view also drops its remaining triggers.
DROP VIEW main_view;
DROP TABLE main_table;
-- Test self-referential triggers (on non-temp table).
-- Note: The YugaByte behavior in this case is different from vanilla Postgres.
-- * Vanilla Postgres would throw an error when a row is modified by a before-row trigger
-- and also by the main statement itself.
-- * However, in YugaByte this is allowed and both changes are applied to the row in the
-- expected order (i.e. the before trigger is applied first). This includes the case when
-- the statement (latter) change would override the changes made by the before trigger
-- For example, in the delete example below, the parent's row(s) will be deleted by the main statement
-- after being updated (to decrement nchildren) by the before trigger (of the child row).
-- Adjacency-list tree: parent is a self-referencing FK; nchildren is a
-- denormalized child count maintained by the triggers below.
create table self_ref_trigger (
id int primary key,
parent int references self_ref_trigger,
data text,
nchildren int not null default 0
);
-- Before-insert trigger: bump the parent's denormalized child counter
-- whenever a new child row references it.
create function self_ref_trigger_ins_func()
returns trigger language plpgsql as
$$
begin
if new.parent is not null then
update self_ref_trigger set nchildren = nchildren + 1
where id = new.parent;
end if;
return new;
end;
$$;
create trigger self_ref_trigger_ins_trig before insert on self_ref_trigger
for each row execute procedure self_ref_trigger_ins_func();
-- Before-delete trigger: mirror of the insert trigger -- decrement the
-- parent's child counter when a child row is removed.
create function self_ref_trigger_del_func()
returns trigger language plpgsql as
$$
begin
if old.parent is not null then
update self_ref_trigger set nchildren = nchildren - 1
where id = old.parent;
end if;
return old;
end;
$$;
create trigger self_ref_trigger_del_trig before delete on self_ref_trigger
for each row execute procedure self_ref_trigger_del_func();
-- Build a three-level tree; the ins trigger keeps nchildren in sync.
insert into self_ref_trigger values (1, null, 'root');
insert into self_ref_trigger values (2, 1, 'root child A');
insert into self_ref_trigger values (3, 1, 'root child B');
insert into self_ref_trigger values (4, 2, 'grandchild 1');
insert into self_ref_trigger values (5, 3, 'grandchild 2');
update self_ref_trigger set data = 'root!' where id = 1;
select * from self_ref_trigger;
-- Deleting node 2 and its child 4 exercises the trigger-plus-statement
-- interaction described in the note above.
delete from self_ref_trigger where id in (2, 4);
select * from self_ref_trigger;
delete from self_ref_trigger;
select * from self_ref_trigger;
drop table self_ref_trigger;
drop function self_ref_trigger_ins_func();
drop function self_ref_trigger_del_func();
-- Before-update trigger that modifies a PRIMARY KEY column (the range key r):
-- the UPDATE below should land the row at the new key value.
CREATE TABLE incremental_key(h INT, r INT, v1 INT, v2 INT, PRIMARY KEY(h, r ASC));
CREATE OR REPLACE FUNCTION increment_key() RETURNS TRIGGER
LANGUAGE PLPGSQL AS $$
BEGIN
NEW.r = NEW.r + 1;
RETURN NEW;
END;
$$;
CREATE TRIGGER increment_key_trigger BEFORE UPDATE ON incremental_key
FOR EACH ROW EXECUTE PROCEDURE increment_key();
INSERT INTO incremental_key VALUES(1, 1, 1, 1);
SELECT * FROM incremental_key;
-- Expect the row to show r incremented by the trigger alongside v1 = 10.
UPDATE incremental_key SET v1 = 10 WHERE h = 1;
SELECT * FROM incremental_key;
DROP TABLE incremental_key;
DROP FUNCTION increment_key;
-- Companion test: the before-update trigger modifies a non-key column (v2),
-- so the row keeps its key while picking up both v1 and v2 changes.
CREATE TABLE incremental_value(h INT, r INT, v1 INT, v2 INT, PRIMARY KEY(h, r ASC));
CREATE OR REPLACE FUNCTION increment_value() RETURNS TRIGGER
LANGUAGE PLPGSQL AS $$
BEGIN
NEW.v2 = NEW.v2 + 1;
RETURN NEW;
END;
$$;
CREATE TRIGGER increment_value_trigger BEFORE UPDATE ON incremental_value
FOR EACH ROW EXECUTE PROCEDURE increment_value();
INSERT INTO incremental_value VALUES(1, 1, 1, 1);
SELECT * FROM incremental_value;
-- Expect v1 = 10 from the statement and v2 bumped by the trigger.
UPDATE incremental_value SET v1 = 10 WHERE h = 1;
SELECT * FROM incremental_value;
DROP TABLE incremental_value;
DROP FUNCTION increment_value;
-- Operators defined in the 4.0 Catalog:
--
-- Derived via the following SQL run within a 4.0 catalog
/*
\o /tmp/operators
SELECT 'CREATE OPERATOR upg_catalog.' || o.oprname || '('
|| E'\n PROCEDURE = upg_catalog.' || quote_ident(p.proname)
|| case when tleft.typname is not null
then E',\n LEFTARG = upg_catalog.' || quote_ident(tleft.typname) else '' end
|| case when tright.typname is not null
then E',\n RIGHTARG = upg_catalog.' || quote_ident(tright.typname) else '' end
|| case when com.oprname is not null
then E',\n COMMUTATOR = ' || com.oprname else '' end
|| case when neg.oprname is not null
then E',\n NEGATOR = ' || neg.oprname else '' end
|| case when rest.proname is not null
then E',\n RESTRICT = upg_catalog.' || quote_ident(rest.proname) else '' end
|| case when pjoin.proname is not null
then E',\n JOIN = upg_catalog.' || quote_ident(pjoin.proname) else '' end
|| case when o.oprcanhash
then E',\n HASHES' else '' end
|| case when sort1.oprname is not null
then E',\n SORT1 = ' || sort1.oprname else '' end
|| case when sort2.oprname is not null
then E',\n SORT2 = ' || sort2.oprname else '' end
|| case when ltcmp.oprname is not null
then E',\n LTCMP = ' || ltcmp.oprname else '' end
|| case when gtcmp.oprname is not null
then E',\n GTCMP = ' || gtcmp.oprname else '' end
|| E'\n);'
FROM pg_operator o
join pg_namespace n on (o.oprnamespace = n.oid)
join pg_proc p on (o.oprcode = p.oid)
left join pg_type tleft on (o.oprleft = tleft.oid)
left join pg_type tright on (o.oprright = tright.oid)
left join pg_operator com on (o.oprcom = com.oid and o.oid > com.oid)
left join pg_operator neg on (o.oprnegate = neg.oid and o.oid > neg.oid)
left join pg_proc rest on (o.oprrest = rest.oid)
left join pg_proc pjoin on (o.oprjoin = pjoin.oid)
left join pg_operator sort1 on (o.oprlsortop = sort1.oid)
left join pg_operator sort2 on (o.oprrsortop = sort2.oid)
left join pg_operator ltcmp on (o.oprltcmpop = ltcmp.oid)
left join pg_operator gtcmp on (o.oprgtcmpop = gtcmp.oid)
WHERE n.nspname = 'pg_catalog'
ORDER BY 1;
*/
-- Factorial and negated-match operators. Definitions are generated from the
-- 4.0 pg_catalog.pg_operator rows (see query above); do not hand-edit the
-- clause values. NEGATOR names are emitted only on one side of each pair.
-- !! : deprecated prefix factorial; ! : deprecated postfix factorial.
CREATE OPERATOR upg_catalog.!!(
 PROCEDURE = upg_catalog.numeric_fac,
 RIGHTARG = upg_catalog.int8
);
-- !!= : legacy "NOT IN comma-separated list" operator (int4 / oid vs text).
CREATE OPERATOR upg_catalog.!!=(
 PROCEDURE = upg_catalog.int4notin,
 LEFTARG = upg_catalog.int4,
 RIGHTARG = upg_catalog.text
);
CREATE OPERATOR upg_catalog.!!=(
 PROCEDURE = upg_catalog.oidnotin,
 LEFTARG = upg_catalog.oid,
 RIGHTARG = upg_catalog.text
);
CREATE OPERATOR upg_catalog.!(
 PROCEDURE = upg_catalog.numeric_fac,
 LEFTARG = upg_catalog.int8
);
-- !~ : does not match regex (case-sensitive), for bpchar/name/text.
CREATE OPERATOR upg_catalog.!~(
 PROCEDURE = upg_catalog.bpcharregexne,
 LEFTARG = upg_catalog.bpchar,
 RIGHTARG = upg_catalog.text,
 NEGATOR = ~,
 RESTRICT = upg_catalog.regexnesel,
 JOIN = upg_catalog.regexnejoinsel
);
CREATE OPERATOR upg_catalog.!~(
 PROCEDURE = upg_catalog.nameregexne,
 LEFTARG = upg_catalog.name,
 RIGHTARG = upg_catalog.text,
 NEGATOR = ~,
 RESTRICT = upg_catalog.regexnesel,
 JOIN = upg_catalog.regexnejoinsel
);
CREATE OPERATOR upg_catalog.!~(
 PROCEDURE = upg_catalog.textregexne,
 LEFTARG = upg_catalog.text,
 RIGHTARG = upg_catalog.text,
 NEGATOR = ~,
 RESTRICT = upg_catalog.regexnesel,
 JOIN = upg_catalog.regexnejoinsel
);
-- !~* : does not match regex (case-insensitive).
CREATE OPERATOR upg_catalog.!~*(
 PROCEDURE = upg_catalog.bpcharicregexne,
 LEFTARG = upg_catalog.bpchar,
 RIGHTARG = upg_catalog.text,
 NEGATOR = ~*,
 RESTRICT = upg_catalog.icregexnesel,
 JOIN = upg_catalog.icregexnejoinsel
);
CREATE OPERATOR upg_catalog.!~*(
 PROCEDURE = upg_catalog.nameicregexne,
 LEFTARG = upg_catalog.name,
 RIGHTARG = upg_catalog.text,
 NEGATOR = ~*,
 RESTRICT = upg_catalog.icregexnesel,
 JOIN = upg_catalog.icregexnejoinsel
);
CREATE OPERATOR upg_catalog.!~*(
 PROCEDURE = upg_catalog.texticregexne,
 LEFTARG = upg_catalog.text,
 RIGHTARG = upg_catalog.text,
 NEGATOR = ~*,
 RESTRICT = upg_catalog.icregexnesel,
 JOIN = upg_catalog.icregexnejoinsel
);
-- !~~ : NOT LIKE, for bpchar/bytea/name/text.
CREATE OPERATOR upg_catalog.!~~(
 PROCEDURE = upg_catalog.bpcharnlike,
 LEFTARG = upg_catalog.bpchar,
 RIGHTARG = upg_catalog.text,
 NEGATOR = ~~,
 RESTRICT = upg_catalog.nlikesel,
 JOIN = upg_catalog.nlikejoinsel
);
CREATE OPERATOR upg_catalog.!~~(
 PROCEDURE = upg_catalog.byteanlike,
 LEFTARG = upg_catalog.bytea,
 RIGHTARG = upg_catalog.bytea,
 NEGATOR = ~~,
 RESTRICT = upg_catalog.nlikesel,
 JOIN = upg_catalog.nlikejoinsel
);
CREATE OPERATOR upg_catalog.!~~(
 PROCEDURE = upg_catalog.namenlike,
 LEFTARG = upg_catalog.name,
 RIGHTARG = upg_catalog.text,
 NEGATOR = ~~,
 RESTRICT = upg_catalog.nlikesel,
 JOIN = upg_catalog.nlikejoinsel
);
CREATE OPERATOR upg_catalog.!~~(
 PROCEDURE = upg_catalog.textnlike,
 LEFTARG = upg_catalog.text,
 RIGHTARG = upg_catalog.text,
 NEGATOR = ~~,
 RESTRICT = upg_catalog.nlikesel,
 JOIN = upg_catalog.nlikejoinsel
);
-- !~~* : NOT ILIKE (case-insensitive NOT LIKE).
CREATE OPERATOR upg_catalog.!~~*(
 PROCEDURE = upg_catalog.bpcharicnlike,
 LEFTARG = upg_catalog.bpchar,
 RIGHTARG = upg_catalog.text,
 NEGATOR = ~~*,
 RESTRICT = upg_catalog.icnlikesel,
 JOIN = upg_catalog.icnlikejoinsel
);
CREATE OPERATOR upg_catalog.!~~*(
 PROCEDURE = upg_catalog.nameicnlike,
 LEFTARG = upg_catalog.name,
 RIGHTARG = upg_catalog.text,
 NEGATOR = ~~*,
 RESTRICT = upg_catalog.icnlikesel,
 JOIN = upg_catalog.icnlikejoinsel
);
CREATE OPERATOR upg_catalog.!~~*(
 PROCEDURE = upg_catalog.texticnlike,
 LEFTARG = upg_catalog.text,
 RIGHTARG = upg_catalog.text,
 NEGATOR = ~~*,
 RESTRICT = upg_catalog.icnlikesel,
 JOIN = upg_catalog.icnlikejoinsel
);
-- ## : geometric "closest point" operators (closest point on the right-hand
-- object to the left-hand object), one per left/right type combination.
CREATE OPERATOR upg_catalog.##(
 PROCEDURE = upg_catalog.close_lb,
 LEFTARG = upg_catalog.line,
 RIGHTARG = upg_catalog.box
);
CREATE OPERATOR upg_catalog.##(
 PROCEDURE = upg_catalog.close_ls,
 LEFTARG = upg_catalog.line,
 RIGHTARG = upg_catalog.lseg
);
CREATE OPERATOR upg_catalog.##(
 PROCEDURE = upg_catalog.close_lseg,
 LEFTARG = upg_catalog.lseg,
 RIGHTARG = upg_catalog.lseg
);
CREATE OPERATOR upg_catalog.##(
 PROCEDURE = upg_catalog.close_pb,
 LEFTARG = upg_catalog.point,
 RIGHTARG = upg_catalog.box
);
CREATE OPERATOR upg_catalog.##(
 PROCEDURE = upg_catalog.close_pl,
 LEFTARG = upg_catalog.point,
 RIGHTARG = upg_catalog.line
);
CREATE OPERATOR upg_catalog.##(
 PROCEDURE = upg_catalog.close_ps,
 LEFTARG = upg_catalog.point,
 RIGHTARG = upg_catalog.lseg
);
CREATE OPERATOR upg_catalog.##(
 PROCEDURE = upg_catalog.close_sb,
 LEFTARG = upg_catalog.lseg,
 RIGHTARG = upg_catalog.box
);
CREATE OPERATOR upg_catalog.##(
 PROCEDURE = upg_catalog.close_sl,
 LEFTARG = upg_catalog.lseg,
 RIGHTARG = upg_catalog.line
);
-- # : bitwise XOR for bit/int types (self-commutating), geometric
-- intersection/intersection-point for box/line/lseg, and prefix
-- "number of points" for path/polygon.
CREATE OPERATOR upg_catalog.#(
 PROCEDURE = upg_catalog.bitxor,
 LEFTARG = upg_catalog."bit",
 RIGHTARG = upg_catalog."bit",
 COMMUTATOR = #
);
CREATE OPERATOR upg_catalog.#(
 PROCEDURE = upg_catalog.box_intersect,
 LEFTARG = upg_catalog.box,
 RIGHTARG = upg_catalog.box
);
CREATE OPERATOR upg_catalog.#(
 PROCEDURE = upg_catalog.int2xor,
 LEFTARG = upg_catalog.int2,
 RIGHTARG = upg_catalog.int2,
 COMMUTATOR = #
);
CREATE OPERATOR upg_catalog.#(
 PROCEDURE = upg_catalog.int4xor,
 LEFTARG = upg_catalog.int4,
 RIGHTARG = upg_catalog.int4,
 COMMUTATOR = #
);
CREATE OPERATOR upg_catalog.#(
 PROCEDURE = upg_catalog.int8xor,
 LEFTARG = upg_catalog.int8,
 RIGHTARG = upg_catalog.int8,
 COMMUTATOR = #
);
CREATE OPERATOR upg_catalog.#(
 PROCEDURE = upg_catalog.line_interpt,
 LEFTARG = upg_catalog.line,
 RIGHTARG = upg_catalog.line,
 COMMUTATOR = #
);
CREATE OPERATOR upg_catalog.#(
 PROCEDURE = upg_catalog.lseg_interpt,
 LEFTARG = upg_catalog.lseg,
 RIGHTARG = upg_catalog.lseg,
 COMMUTATOR = #
);
CREATE OPERATOR upg_catalog.#(
 PROCEDURE = upg_catalog.path_npoints,
 RIGHTARG = upg_catalog.path
);
CREATE OPERATOR upg_catalog.#(
 PROCEDURE = upg_catalog.poly_npoints,
 RIGHTARG = upg_catalog.polygon
);
-- #< #<= #<> #= #> #>= : compare the LENGTH of a tinterval against a reltime.
-- Each declares its logical NEGATOR from the generated catalog rows.
CREATE OPERATOR upg_catalog.#<(
 PROCEDURE = upg_catalog.tintervallenlt,
 LEFTARG = upg_catalog.tinterval,
 RIGHTARG = upg_catalog.reltime,
 NEGATOR = #>=
);
CREATE OPERATOR upg_catalog.#<=(
 PROCEDURE = upg_catalog.tintervallenle,
 LEFTARG = upg_catalog.tinterval,
 RIGHTARG = upg_catalog.reltime,
 NEGATOR = #>
);
CREATE OPERATOR upg_catalog.#<>(
 PROCEDURE = upg_catalog.tintervallenne,
 LEFTARG = upg_catalog.tinterval,
 RIGHTARG = upg_catalog.reltime,
 NEGATOR = #=
);
CREATE OPERATOR upg_catalog.#=(
 PROCEDURE = upg_catalog.tintervalleneq,
 LEFTARG = upg_catalog.tinterval,
 RIGHTARG = upg_catalog.reltime,
 NEGATOR = #<>
);
CREATE OPERATOR upg_catalog.#>(
 PROCEDURE = upg_catalog.tintervallengt,
 LEFTARG = upg_catalog.tinterval,
 RIGHTARG = upg_catalog.reltime,
 NEGATOR = #<=
);
CREATE OPERATOR upg_catalog.#>=(
 PROCEDURE = upg_catalog.tintervallenge,
 LEFTARG = upg_catalog.tinterval,
 RIGHTARG = upg_catalog.reltime,
 NEGATOR = #<
);
-- % : modulo for the integer type cross-products and numeric.
CREATE OPERATOR upg_catalog.%(
 PROCEDURE = upg_catalog.int24mod,
 LEFTARG = upg_catalog.int2,
 RIGHTARG = upg_catalog.int4
);
CREATE OPERATOR upg_catalog.%(
 PROCEDURE = upg_catalog.int2mod,
 LEFTARG = upg_catalog.int2,
 RIGHTARG = upg_catalog.int2
);
CREATE OPERATOR upg_catalog.%(
 PROCEDURE = upg_catalog.int42mod,
 LEFTARG = upg_catalog.int4,
 RIGHTARG = upg_catalog.int2
);
CREATE OPERATOR upg_catalog.%(
 PROCEDURE = upg_catalog.int4mod,
 LEFTARG = upg_catalog.int4,
 RIGHTARG = upg_catalog.int4
);
CREATE OPERATOR upg_catalog.%(
 PROCEDURE = upg_catalog.int8mod,
 LEFTARG = upg_catalog.int8,
 RIGHTARG = upg_catalog.int8
);
CREATE OPERATOR upg_catalog.%(
 PROCEDURE = upg_catalog.numeric_mod,
 LEFTARG = upg_catalog."numeric",
 RIGHTARG = upg_catalog."numeric"
);
-- && : "overlaps" for arrays and geometric types (self-commutating); uses the
-- area selectivity estimators where the 4.0 catalog assigned them.
CREATE OPERATOR upg_catalog.&&(
 PROCEDURE = upg_catalog.arrayoverlap,
 LEFTARG = upg_catalog.anyarray,
 RIGHTARG = upg_catalog.anyarray,
 COMMUTATOR = &&,
 RESTRICT = upg_catalog.areasel,
 JOIN = upg_catalog.areajoinsel
);
CREATE OPERATOR upg_catalog.&&(
 PROCEDURE = upg_catalog.box_overlap,
 LEFTARG = upg_catalog.box,
 RIGHTARG = upg_catalog.box,
 COMMUTATOR = &&,
 RESTRICT = upg_catalog.areasel,
 JOIN = upg_catalog.areajoinsel
);
CREATE OPERATOR upg_catalog.&&(
 PROCEDURE = upg_catalog.circle_overlap,
 LEFTARG = upg_catalog.circle,
 RIGHTARG = upg_catalog.circle,
 COMMUTATOR = &&,
 RESTRICT = upg_catalog.areasel,
 JOIN = upg_catalog.areajoinsel
);
CREATE OPERATOR upg_catalog.&&(
 PROCEDURE = upg_catalog.poly_overlap,
 LEFTARG = upg_catalog.polygon,
 RIGHTARG = upg_catalog.polygon,
 COMMUTATOR = &&,
 RESTRICT = upg_catalog.areasel,
 JOIN = upg_catalog.areajoinsel
);
CREATE OPERATOR upg_catalog.&&(
 PROCEDURE = upg_catalog.tintervalov,
 LEFTARG = upg_catalog.tinterval,
 RIGHTARG = upg_catalog.tinterval,
 COMMUTATOR = &&
);
-- & : bitwise AND for bit/inet/int types.
CREATE OPERATOR upg_catalog.&(
 PROCEDURE = upg_catalog.bitand,
 LEFTARG = upg_catalog."bit",
 RIGHTARG = upg_catalog."bit",
 COMMUTATOR = &
);
CREATE OPERATOR upg_catalog.&(
 PROCEDURE = upg_catalog.inetand,
 LEFTARG = upg_catalog.inet,
 RIGHTARG = upg_catalog.inet
);
CREATE OPERATOR upg_catalog.&(
 PROCEDURE = upg_catalog.int2and,
 LEFTARG = upg_catalog.int2,
 RIGHTARG = upg_catalog.int2,
 COMMUTATOR = &
);
CREATE OPERATOR upg_catalog.&(
 PROCEDURE = upg_catalog.int4and,
 LEFTARG = upg_catalog.int4,
 RIGHTARG = upg_catalog.int4,
 COMMUTATOR = &
);
CREATE OPERATOR upg_catalog.&(
 PROCEDURE = upg_catalog.int8and,
 LEFTARG = upg_catalog.int8,
 RIGHTARG = upg_catalog.int8,
 COMMUTATOR = &
);
-- &< : "does not extend to the right of"; &<| : "does not extend above";
-- &> : "does not extend to the left of" (geometric positional tests).
CREATE OPERATOR upg_catalog.&<(
 PROCEDURE = upg_catalog.box_overleft,
 LEFTARG = upg_catalog.box,
 RIGHTARG = upg_catalog.box,
 RESTRICT = upg_catalog.positionsel,
 JOIN = upg_catalog.positionjoinsel
);
CREATE OPERATOR upg_catalog.&<(
 PROCEDURE = upg_catalog.circle_overleft,
 LEFTARG = upg_catalog.circle,
 RIGHTARG = upg_catalog.circle,
 RESTRICT = upg_catalog.positionsel,
 JOIN = upg_catalog.positionjoinsel
);
CREATE OPERATOR upg_catalog.&<(
 PROCEDURE = upg_catalog.poly_overleft,
 LEFTARG = upg_catalog.polygon,
 RIGHTARG = upg_catalog.polygon,
 RESTRICT = upg_catalog.positionsel,
 JOIN = upg_catalog.positionjoinsel
);
CREATE OPERATOR upg_catalog.&<|(
 PROCEDURE = upg_catalog.box_overbelow,
 LEFTARG = upg_catalog.box,
 RIGHTARG = upg_catalog.box,
 RESTRICT = upg_catalog.positionsel,
 JOIN = upg_catalog.positionjoinsel
);
CREATE OPERATOR upg_catalog.&<|(
 PROCEDURE = upg_catalog.circle_overbelow,
 LEFTARG = upg_catalog.circle,
 RIGHTARG = upg_catalog.circle,
 RESTRICT = upg_catalog.positionsel,
 JOIN = upg_catalog.positionjoinsel
);
CREATE OPERATOR upg_catalog.&<|(
 PROCEDURE = upg_catalog.poly_overbelow,
 LEFTARG = upg_catalog.polygon,
 RIGHTARG = upg_catalog.polygon,
 RESTRICT = upg_catalog.positionsel,
 JOIN = upg_catalog.positionjoinsel
);
CREATE OPERATOR upg_catalog.&>(
 PROCEDURE = upg_catalog.box_overright,
 LEFTARG = upg_catalog.box,
 RIGHTARG = upg_catalog.box,
 RESTRICT = upg_catalog.positionsel,
 JOIN = upg_catalog.positionjoinsel
);
CREATE OPERATOR upg_catalog.&>(
 PROCEDURE = upg_catalog.circle_overright,
 LEFTARG = upg_catalog.circle,
 RIGHTARG = upg_catalog.circle,
 RESTRICT = upg_catalog.positionsel,
 JOIN = upg_catalog.positionjoinsel
);
CREATE OPERATOR upg_catalog.&>(
 PROCEDURE = upg_catalog.poly_overright,
 LEFTARG = upg_catalog.polygon,
 RIGHTARG = upg_catalog.polygon,
 RESTRICT = upg_catalog.positionsel,
 JOIN = upg_catalog.positionjoinsel
);
-- * : multiplication across numeric, money, interval and geometric types.
-- Geometric point-scaling variants are not commutative; the arithmetic
-- variants declare COMMUTATOR = * per the generated catalog rows.
CREATE OPERATOR upg_catalog.*(
 PROCEDURE = upg_catalog.box_mul,
 LEFTARG = upg_catalog.box,
 RIGHTARG = upg_catalog.point
);
CREATE OPERATOR upg_catalog.*(
 PROCEDURE = upg_catalog.cash_mul_flt4,
 LEFTARG = upg_catalog.money,
 RIGHTARG = upg_catalog.float4,
 COMMUTATOR = *
);
CREATE OPERATOR upg_catalog.*(
 PROCEDURE = upg_catalog.cash_mul_flt8,
 LEFTARG = upg_catalog.money,
 RIGHTARG = upg_catalog.float8,
 COMMUTATOR = *
);
CREATE OPERATOR upg_catalog.*(
 PROCEDURE = upg_catalog.cash_mul_int2,
 LEFTARG = upg_catalog.money,
 RIGHTARG = upg_catalog.int2,
 COMMUTATOR = *
);
CREATE OPERATOR upg_catalog.*(
 PROCEDURE = upg_catalog.cash_mul_int4,
 LEFTARG = upg_catalog.money,
 RIGHTARG = upg_catalog.int4,
 COMMUTATOR = *
);
CREATE OPERATOR upg_catalog.*(
 PROCEDURE = upg_catalog.circle_mul_pt,
 LEFTARG = upg_catalog.circle,
 RIGHTARG = upg_catalog.point
);
CREATE OPERATOR upg_catalog.*(
 PROCEDURE = upg_catalog.float48mul,
 LEFTARG = upg_catalog.float4,
 RIGHTARG = upg_catalog.float8,
 COMMUTATOR = *
);
CREATE OPERATOR upg_catalog.*(
 PROCEDURE = upg_catalog.float4mul,
 LEFTARG = upg_catalog.float4,
 RIGHTARG = upg_catalog.float4,
 COMMUTATOR = *
);
CREATE OPERATOR upg_catalog.*(
 PROCEDURE = upg_catalog.float84mul,
 LEFTARG = upg_catalog.float8,
 RIGHTARG = upg_catalog.float4,
 COMMUTATOR = *
);
CREATE OPERATOR upg_catalog.*(
 PROCEDURE = upg_catalog.float8mul,
 LEFTARG = upg_catalog.float8,
 RIGHTARG = upg_catalog.float8,
 COMMUTATOR = *
);
CREATE OPERATOR upg_catalog.*(
 PROCEDURE = upg_catalog.flt4_mul_cash,
 LEFTARG = upg_catalog.float4,
 RIGHTARG = upg_catalog.money,
 COMMUTATOR = *
);
CREATE OPERATOR upg_catalog.*(
 PROCEDURE = upg_catalog.flt8_mul_cash,
 LEFTARG = upg_catalog.float8,
 RIGHTARG = upg_catalog.money,
 COMMUTATOR = *
);
CREATE OPERATOR upg_catalog.*(
 PROCEDURE = upg_catalog.int24mul,
 LEFTARG = upg_catalog.int2,
 RIGHTARG = upg_catalog.int4,
 COMMUTATOR = *
);
CREATE OPERATOR upg_catalog.*(
 PROCEDURE = upg_catalog.int2_mul_cash,
 LEFTARG = upg_catalog.int2,
 RIGHTARG = upg_catalog.money,
 COMMUTATOR = *
);
CREATE OPERATOR upg_catalog.*(
 PROCEDURE = upg_catalog.int2mul,
 LEFTARG = upg_catalog.int2,
 RIGHTARG = upg_catalog.int2,
 COMMUTATOR = *
);
CREATE OPERATOR upg_catalog.*(
 PROCEDURE = upg_catalog.int42mul,
 LEFTARG = upg_catalog.int4,
 RIGHTARG = upg_catalog.int2,
 COMMUTATOR = *
);
CREATE OPERATOR upg_catalog.*(
 PROCEDURE = upg_catalog.int48mul,
 LEFTARG = upg_catalog.int4,
 RIGHTARG = upg_catalog.int8,
 COMMUTATOR = *
);
CREATE OPERATOR upg_catalog.*(
 PROCEDURE = upg_catalog.int4_mul_cash,
 LEFTARG = upg_catalog.int4,
 RIGHTARG = upg_catalog.money,
 COMMUTATOR = *
);
CREATE OPERATOR upg_catalog.*(
 PROCEDURE = upg_catalog.int4mul,
 LEFTARG = upg_catalog.int4,
 RIGHTARG = upg_catalog.int4,
 COMMUTATOR = *
);
CREATE OPERATOR upg_catalog.*(
 PROCEDURE = upg_catalog.int84mul,
 LEFTARG = upg_catalog.int8,
 RIGHTARG = upg_catalog.int4,
 COMMUTATOR = *
);
CREATE OPERATOR upg_catalog.*(
 PROCEDURE = upg_catalog.int8mul,
 LEFTARG = upg_catalog.int8,
 RIGHTARG = upg_catalog.int8,
 COMMUTATOR = *
);
CREATE OPERATOR upg_catalog.*(
 PROCEDURE = upg_catalog.interval_mul,
 LEFTARG = upg_catalog."interval",
 RIGHTARG = upg_catalog.float8,
 COMMUTATOR = *
);
CREATE OPERATOR upg_catalog.*(
 PROCEDURE = upg_catalog.mul_d_interval,
 LEFTARG = upg_catalog.float8,
 RIGHTARG = upg_catalog."interval",
 COMMUTATOR = *
);
CREATE OPERATOR upg_catalog.*(
 PROCEDURE = upg_catalog.numeric_mul,
 LEFTARG = upg_catalog."numeric",
 RIGHTARG = upg_catalog."numeric",
 COMMUTATOR = *
);
CREATE OPERATOR upg_catalog.*(
 PROCEDURE = upg_catalog.path_mul_pt,
 LEFTARG = upg_catalog.path,
 RIGHTARG = upg_catalog.point
);
CREATE OPERATOR upg_catalog.*(
 PROCEDURE = upg_catalog.point_mul,
 LEFTARG = upg_catalog.point,
 RIGHTARG = upg_catalog.point,
 COMMUTATOR = *
);
-- + : addition/plus across ACL arrays, numeric, money, date/time and
-- geometric types, plus the unary-plus (RIGHTARG-only) identity variants.
-- Commutative pairs declare COMMUTATOR = + per the generated catalog rows.
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.aclinsert,
 LEFTARG = upg_catalog._aclitem,
 RIGHTARG = upg_catalog.aclitem
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.box_add,
 LEFTARG = upg_catalog.box,
 RIGHTARG = upg_catalog.point
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.cash_pl,
 LEFTARG = upg_catalog.money,
 RIGHTARG = upg_catalog.money,
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.circle_add_pt,
 LEFTARG = upg_catalog.circle,
 RIGHTARG = upg_catalog.point
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.date_pl_interval,
 LEFTARG = upg_catalog.date,
 RIGHTARG = upg_catalog."interval",
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.date_pli,
 LEFTARG = upg_catalog.date,
 RIGHTARG = upg_catalog.int4,
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.datetime_pl,
 LEFTARG = upg_catalog.date,
 RIGHTARG = upg_catalog."time",
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.datetimetz_pl,
 LEFTARG = upg_catalog.date,
 RIGHTARG = upg_catalog.timetz,
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.float48pl,
 LEFTARG = upg_catalog.float4,
 RIGHTARG = upg_catalog.float8,
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.float4pl,
 LEFTARG = upg_catalog.float4,
 RIGHTARG = upg_catalog.float4,
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.float4up,
 RIGHTARG = upg_catalog.float4
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.float84pl,
 LEFTARG = upg_catalog.float8,
 RIGHTARG = upg_catalog.float4,
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.float8pl,
 LEFTARG = upg_catalog.float8,
 RIGHTARG = upg_catalog.float8,
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.float8up,
 RIGHTARG = upg_catalog.float8
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.inetpl,
 LEFTARG = upg_catalog.inet,
 RIGHTARG = upg_catalog.int8,
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.int24pl,
 LEFTARG = upg_catalog.int2,
 RIGHTARG = upg_catalog.int4,
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.int2pl,
 LEFTARG = upg_catalog.int2,
 RIGHTARG = upg_catalog.int2,
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.int2up,
 RIGHTARG = upg_catalog.int2
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.int42pl,
 LEFTARG = upg_catalog.int4,
 RIGHTARG = upg_catalog.int2,
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.int48pl,
 LEFTARG = upg_catalog.int4,
 RIGHTARG = upg_catalog.int8,
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.int4pl,
 LEFTARG = upg_catalog.int4,
 RIGHTARG = upg_catalog.int4,
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.int4up,
 RIGHTARG = upg_catalog.int4
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.int84pl,
 LEFTARG = upg_catalog.int8,
 RIGHTARG = upg_catalog.int4,
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.int8pl,
 LEFTARG = upg_catalog.int8,
 RIGHTARG = upg_catalog.int8,
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.int8pl_inet,
 LEFTARG = upg_catalog.int8,
 RIGHTARG = upg_catalog.inet,
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.int8up,
 RIGHTARG = upg_catalog.int8
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.integer_pl_date,
 LEFTARG = upg_catalog.int4,
 RIGHTARG = upg_catalog.date,
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.interval_pl,
 LEFTARG = upg_catalog."interval",
 RIGHTARG = upg_catalog."interval",
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.interval_pl_date,
 LEFTARG = upg_catalog."interval",
 RIGHTARG = upg_catalog.date,
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.interval_pl_time,
 LEFTARG = upg_catalog."interval",
 RIGHTARG = upg_catalog."time",
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.interval_pl_timestamp,
 LEFTARG = upg_catalog."interval",
 RIGHTARG = upg_catalog."timestamp",
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.interval_pl_timestamptz,
 LEFTARG = upg_catalog."interval",
 RIGHTARG = upg_catalog.timestamptz,
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.interval_pl_timetz,
 LEFTARG = upg_catalog."interval",
 RIGHTARG = upg_catalog.timetz,
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.numeric_add,
 LEFTARG = upg_catalog."numeric",
 RIGHTARG = upg_catalog."numeric",
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.numeric_uplus,
 RIGHTARG = upg_catalog."numeric"
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.path_add,
 LEFTARG = upg_catalog.path,
 RIGHTARG = upg_catalog.path,
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.path_add_pt,
 LEFTARG = upg_catalog.path,
 RIGHTARG = upg_catalog.point
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.point_add,
 LEFTARG = upg_catalog.point,
 RIGHTARG = upg_catalog.point,
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.time_pl_interval,
 LEFTARG = upg_catalog."time",
 RIGHTARG = upg_catalog."interval",
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.timedate_pl,
 LEFTARG = upg_catalog."time",
 RIGHTARG = upg_catalog.date,
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.timepl,
 LEFTARG = upg_catalog.abstime,
 RIGHTARG = upg_catalog.reltime
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.timestamp_pl_interval,
 LEFTARG = upg_catalog."timestamp",
 RIGHTARG = upg_catalog."interval",
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.timestamptz_pl_interval,
 LEFTARG = upg_catalog.timestamptz,
 RIGHTARG = upg_catalog."interval",
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.timetz_pl_interval,
 LEFTARG = upg_catalog.timetz,
 RIGHTARG = upg_catalog."interval",
 COMMUTATOR = +
);
CREATE OPERATOR upg_catalog.+(
 PROCEDURE = upg_catalog.timetzdate_pl,
 LEFTARG = upg_catalog.timetz,
 RIGHTARG = upg_catalog.date,
 COMMUTATOR = +
);
-- - : subtraction/minus across ACL arrays, numeric, money, date/time and
-- geometric types, plus the unary-minus (RIGHTARG-only) negation variants.
-- Subtraction is not commutative, so no COMMUTATOR clauses appear here.
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.aclremove,
 LEFTARG = upg_catalog._aclitem,
 RIGHTARG = upg_catalog.aclitem
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.box_sub,
 LEFTARG = upg_catalog.box,
 RIGHTARG = upg_catalog.point
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.cash_mi,
 LEFTARG = upg_catalog.money,
 RIGHTARG = upg_catalog.money
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.circle_sub_pt,
 LEFTARG = upg_catalog.circle,
 RIGHTARG = upg_catalog.point
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.date_mi,
 LEFTARG = upg_catalog.date,
 RIGHTARG = upg_catalog.date
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.date_mi_interval,
 LEFTARG = upg_catalog.date,
 RIGHTARG = upg_catalog."interval"
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.date_mii,
 LEFTARG = upg_catalog.date,
 RIGHTARG = upg_catalog.int4
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.float48mi,
 LEFTARG = upg_catalog.float4,
 RIGHTARG = upg_catalog.float8
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.float4mi,
 LEFTARG = upg_catalog.float4,
 RIGHTARG = upg_catalog.float4
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.float4um,
 RIGHTARG = upg_catalog.float4
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.float84mi,
 LEFTARG = upg_catalog.float8,
 RIGHTARG = upg_catalog.float4
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.float8mi,
 LEFTARG = upg_catalog.float8,
 RIGHTARG = upg_catalog.float8
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.float8um,
 RIGHTARG = upg_catalog.float8
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.inetmi,
 LEFTARG = upg_catalog.inet,
 RIGHTARG = upg_catalog.inet
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.inetmi_int8,
 LEFTARG = upg_catalog.inet,
 RIGHTARG = upg_catalog.int8
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.int24mi,
 LEFTARG = upg_catalog.int2,
 RIGHTARG = upg_catalog.int4
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.int2mi,
 LEFTARG = upg_catalog.int2,
 RIGHTARG = upg_catalog.int2
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.int2um,
 RIGHTARG = upg_catalog.int2
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.int42mi,
 LEFTARG = upg_catalog.int4,
 RIGHTARG = upg_catalog.int2
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.int48mi,
 LEFTARG = upg_catalog.int4,
 RIGHTARG = upg_catalog.int8
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.int4mi,
 LEFTARG = upg_catalog.int4,
 RIGHTARG = upg_catalog.int4
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.int4um,
 RIGHTARG = upg_catalog.int4
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.int84mi,
 LEFTARG = upg_catalog.int8,
 RIGHTARG = upg_catalog.int4
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.int8mi,
 LEFTARG = upg_catalog.int8,
 RIGHTARG = upg_catalog.int8
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.int8um,
 RIGHTARG = upg_catalog.int8
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.interval_mi,
 LEFTARG = upg_catalog."interval",
 RIGHTARG = upg_catalog."interval"
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.interval_um,
 RIGHTARG = upg_catalog."interval"
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.numeric_sub,
 LEFTARG = upg_catalog."numeric",
 RIGHTARG = upg_catalog."numeric"
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.numeric_uminus,
 RIGHTARG = upg_catalog."numeric"
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.path_sub_pt,
 LEFTARG = upg_catalog.path,
 RIGHTARG = upg_catalog.point
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.point_sub,
 LEFTARG = upg_catalog.point,
 RIGHTARG = upg_catalog.point
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.time_mi_interval,
 LEFTARG = upg_catalog."time",
 RIGHTARG = upg_catalog."interval"
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.time_mi_time,
 LEFTARG = upg_catalog."time",
 RIGHTARG = upg_catalog."time"
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.timemi,
 LEFTARG = upg_catalog.abstime,
 RIGHTARG = upg_catalog.reltime
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.timestamp_mi,
 LEFTARG = upg_catalog."timestamp",
 RIGHTARG = upg_catalog."timestamp"
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.timestamp_mi_interval,
 LEFTARG = upg_catalog."timestamp",
 RIGHTARG = upg_catalog."interval"
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.timestamptz_mi,
 LEFTARG = upg_catalog.timestamptz,
 RIGHTARG = upg_catalog.timestamptz
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.timestamptz_mi_interval,
 LEFTARG = upg_catalog.timestamptz,
 RIGHTARG = upg_catalog."interval"
);
CREATE OPERATOR upg_catalog.-(
 PROCEDURE = upg_catalog.timetz_mi_interval,
 LEFTARG = upg_catalog.timetz,
 RIGHTARG = upg_catalog."interval"
);
-- / : division across numeric, money, interval and geometric point-scaling
-- types. Division is not commutative, so no COMMUTATOR clauses appear.
CREATE OPERATOR upg_catalog./(
 PROCEDURE = upg_catalog.box_div,
 LEFTARG = upg_catalog.box,
 RIGHTARG = upg_catalog.point
);
CREATE OPERATOR upg_catalog./(
 PROCEDURE = upg_catalog.cash_div_flt4,
 LEFTARG = upg_catalog.money,
 RIGHTARG = upg_catalog.float4
);
CREATE OPERATOR upg_catalog./(
 PROCEDURE = upg_catalog.cash_div_flt8,
 LEFTARG = upg_catalog.money,
 RIGHTARG = upg_catalog.float8
);
CREATE OPERATOR upg_catalog./(
 PROCEDURE = upg_catalog.cash_div_int2,
 LEFTARG = upg_catalog.money,
 RIGHTARG = upg_catalog.int2
);
CREATE OPERATOR upg_catalog./(
 PROCEDURE = upg_catalog.cash_div_int4,
 LEFTARG = upg_catalog.money,
 RIGHTARG = upg_catalog.int4
);
CREATE OPERATOR upg_catalog./(
 PROCEDURE = upg_catalog.circle_div_pt,
 LEFTARG = upg_catalog.circle,
 RIGHTARG = upg_catalog.point
);
CREATE OPERATOR upg_catalog./(
 PROCEDURE = upg_catalog.float48div,
 LEFTARG = upg_catalog.float4,
 RIGHTARG = upg_catalog.float8
);
CREATE OPERATOR upg_catalog./(
 PROCEDURE = upg_catalog.float4div,
 LEFTARG = upg_catalog.float4,
 RIGHTARG = upg_catalog.float4
);
CREATE OPERATOR upg_catalog./(
 PROCEDURE = upg_catalog.float84div,
 LEFTARG = upg_catalog.float8,
 RIGHTARG = upg_catalog.float4
);
CREATE OPERATOR upg_catalog./(
 PROCEDURE = upg_catalog.float8div,
 LEFTARG = upg_catalog.float8,
 RIGHTARG = upg_catalog.float8
);
CREATE OPERATOR upg_catalog./(
 PROCEDURE = upg_catalog.int24div,
 LEFTARG = upg_catalog.int2,
 RIGHTARG = upg_catalog.int4
);
CREATE OPERATOR upg_catalog./(
 PROCEDURE = upg_catalog.int2div,
 LEFTARG = upg_catalog.int2,
 RIGHTARG = upg_catalog.int2
);
CREATE OPERATOR upg_catalog./(
 PROCEDURE = upg_catalog.int42div,
 LEFTARG = upg_catalog.int4,
 RIGHTARG = upg_catalog.int2
);
CREATE OPERATOR upg_catalog./(
 PROCEDURE = upg_catalog.int48div,
 LEFTARG = upg_catalog.int4,
 RIGHTARG = upg_catalog.int8
);
CREATE OPERATOR upg_catalog./(
 PROCEDURE = upg_catalog.int4div,
 LEFTARG = upg_catalog.int4,
 RIGHTARG = upg_catalog.int4
);
CREATE OPERATOR upg_catalog./(
 PROCEDURE = upg_catalog.int84div,
 LEFTARG = upg_catalog.int8,
 RIGHTARG = upg_catalog.int4
);
CREATE OPERATOR upg_catalog./(
 PROCEDURE = upg_catalog.int8div,
 LEFTARG = upg_catalog.int8,
 RIGHTARG = upg_catalog.int8
);
CREATE OPERATOR upg_catalog./(
 PROCEDURE = upg_catalog.interval_div,
 LEFTARG = upg_catalog."interval",
 RIGHTARG = upg_catalog.float8
);
CREATE OPERATOR upg_catalog./(
 PROCEDURE = upg_catalog.numeric_div,
 LEFTARG = upg_catalog."numeric",
 RIGHTARG = upg_catalog."numeric"
);
CREATE OPERATOR upg_catalog./(
 PROCEDURE = upg_catalog.path_div_pt,
 LEFTARG = upg_catalog.path,
 RIGHTARG = upg_catalog.point
);
CREATE OPERATOR upg_catalog./(
 PROCEDURE = upg_catalog.point_div,
 LEFTARG = upg_catalog.point,
 RIGHTARG = upg_catalog.point
);
-- <#> : construct a tinterval from two abstime endpoints.
CREATE OPERATOR upg_catalog.<#>(
 PROCEDURE = upg_catalog.mktinterval,
 LEFTARG = upg_catalog.abstime,
 RIGHTARG = upg_catalog.abstime
);
-- < : less-than for each comparable type. Every definition declares
-- COMMUTATOR = >, NEGATOR = >=, and the scalar less-than selectivity
-- estimators (area estimators for box/circle), per the generated rows.
CREATE OPERATOR upg_catalog.<(
 PROCEDURE = upg_catalog.abstimelt,
 LEFTARG = upg_catalog.abstime,
 RIGHTARG = upg_catalog.abstime,
 COMMUTATOR = >,
 NEGATOR = >=,
 RESTRICT = upg_catalog.scalarltsel,
 JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
 PROCEDURE = upg_catalog.array_lt,
 LEFTARG = upg_catalog.anyarray,
 RIGHTARG = upg_catalog.anyarray,
 COMMUTATOR = >,
 NEGATOR = >=,
 RESTRICT = upg_catalog.scalarltsel,
 JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
 PROCEDURE = upg_catalog.bitlt,
 LEFTARG = upg_catalog."bit",
 RIGHTARG = upg_catalog."bit",
 COMMUTATOR = >,
 NEGATOR = >=,
 RESTRICT = upg_catalog.scalarltsel,
 JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
 PROCEDURE = upg_catalog.boollt,
 LEFTARG = upg_catalog.bool,
 RIGHTARG = upg_catalog.bool,
 COMMUTATOR = >,
 NEGATOR = >=,
 RESTRICT = upg_catalog.scalarltsel,
 JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
 PROCEDURE = upg_catalog.box_lt,
 LEFTARG = upg_catalog.box,
 RIGHTARG = upg_catalog.box,
 COMMUTATOR = >,
 NEGATOR = >=,
 RESTRICT = upg_catalog.areasel,
 JOIN = upg_catalog.areajoinsel
);
CREATE OPERATOR upg_catalog.<(
 PROCEDURE = upg_catalog.bpcharlt,
 LEFTARG = upg_catalog.bpchar,
 RIGHTARG = upg_catalog.bpchar,
 COMMUTATOR = >,
 NEGATOR = >=,
 RESTRICT = upg_catalog.scalarltsel,
 JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
 PROCEDURE = upg_catalog.bytealt,
 LEFTARG = upg_catalog.bytea,
 RIGHTARG = upg_catalog.bytea,
 COMMUTATOR = >,
 NEGATOR = >=,
 RESTRICT = upg_catalog.scalarltsel,
 JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
 PROCEDURE = upg_catalog.cash_lt,
 LEFTARG = upg_catalog.money,
 RIGHTARG = upg_catalog.money,
 COMMUTATOR = >,
 NEGATOR = >=,
 RESTRICT = upg_catalog.scalarltsel,
 JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
 PROCEDURE = upg_catalog.charlt,
 LEFTARG = upg_catalog."char",
 RIGHTARG = upg_catalog."char",
 COMMUTATOR = >,
 NEGATOR = >=,
 RESTRICT = upg_catalog.scalarltsel,
 JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
 PROCEDURE = upg_catalog.circle_lt,
 LEFTARG = upg_catalog.circle,
 RIGHTARG = upg_catalog.circle,
 COMMUTATOR = >,
 NEGATOR = >=,
 RESTRICT = upg_catalog.areasel,
 JOIN = upg_catalog.areajoinsel
);
CREATE OPERATOR upg_catalog.<(
 PROCEDURE = upg_catalog.date_lt,
 LEFTARG = upg_catalog.date,
 RIGHTARG = upg_catalog.date,
 COMMUTATOR = >,
 NEGATOR = >=,
 RESTRICT = upg_catalog.scalarltsel,
 JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
 PROCEDURE = upg_catalog.date_lt_timestamp,
 LEFTARG = upg_catalog.date,
 RIGHTARG = upg_catalog."timestamp",
 COMMUTATOR = >,
 NEGATOR = >=,
 RESTRICT = upg_catalog.scalarltsel,
 JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
 PROCEDURE = upg_catalog.date_lt_timestamptz,
 LEFTARG = upg_catalog.date,
 RIGHTARG = upg_catalog.timestamptz,
 COMMUTATOR = >,
 NEGATOR = >=,
 RESTRICT = upg_catalog.scalarltsel,
 JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
 PROCEDURE = upg_catalog.float48lt,
 LEFTARG = upg_catalog.float4,
 RIGHTARG = upg_catalog.float8,
 COMMUTATOR = >,
 NEGATOR = >=,
 RESTRICT = upg_catalog.scalarltsel,
 JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
 PROCEDURE = upg_catalog.float4lt,
 LEFTARG = upg_catalog.float4,
 RIGHTARG = upg_catalog.float4,
 COMMUTATOR = >,
 NEGATOR = >=,
 RESTRICT = upg_catalog.scalarltsel,
 JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
 PROCEDURE = upg_catalog.float84lt,
 LEFTARG = upg_catalog.float8,
 RIGHTARG = upg_catalog.float4,
 COMMUTATOR = >,
 NEGATOR = >=,
 RESTRICT = upg_catalog.scalarltsel,
 JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
 PROCEDURE = upg_catalog.float8lt,
 LEFTARG = upg_catalog.float8,
 RIGHTARG = upg_catalog.float8,
 COMMUTATOR = >,
 NEGATOR = >=,
 RESTRICT = upg_catalog.scalarltsel,
 JOIN = upg_catalog.scalarltjoinsel
);
-- gpxlogloc is the Greenplum XLOG-location type.
CREATE OPERATOR upg_catalog.<(
 PROCEDURE = upg_catalog.gpxlogloclt,
 LEFTARG = upg_catalog.gpxlogloc,
 RIGHTARG = upg_catalog.gpxlogloc,
 COMMUTATOR = >,
 NEGATOR = >=,
 RESTRICT = upg_catalog.scalarltsel,
 JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
 PROCEDURE = upg_catalog.int24lt,
 LEFTARG = upg_catalog.int2,
 RIGHTARG = upg_catalog.int4,
 COMMUTATOR = >,
 NEGATOR = >=,
 RESTRICT = upg_catalog.scalarltsel,
 JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
 PROCEDURE = upg_catalog.int28lt,
 LEFTARG = upg_catalog.int2,
 RIGHTARG = upg_catalog.int8,
 COMMUTATOR = >,
 NEGATOR = >=,
 RESTRICT = upg_catalog.scalarltsel,
 JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
 PROCEDURE = upg_catalog.int2lt,
 LEFTARG = upg_catalog.int2,
 RIGHTARG = upg_catalog.int2,
 COMMUTATOR = >,
 NEGATOR = >=,
 RESTRICT = upg_catalog.scalarltsel,
 JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
PROCEDURE = upg_catalog.int42lt,
LEFTARG = upg_catalog.int4,
RIGHTARG = upg_catalog.int2,
COMMUTATOR = >,
NEGATOR = >=,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
PROCEDURE = upg_catalog.int48lt,
LEFTARG = upg_catalog.int4,
RIGHTARG = upg_catalog.int8,
COMMUTATOR = >,
NEGATOR = >=,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
PROCEDURE = upg_catalog.int4lt,
LEFTARG = upg_catalog.int4,
RIGHTARG = upg_catalog.int4,
COMMUTATOR = >,
NEGATOR = >=,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
PROCEDURE = upg_catalog.int82lt,
LEFTARG = upg_catalog.int8,
RIGHTARG = upg_catalog.int2,
COMMUTATOR = >,
NEGATOR = >=,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
PROCEDURE = upg_catalog.int84lt,
LEFTARG = upg_catalog.int8,
RIGHTARG = upg_catalog.int4,
COMMUTATOR = >,
NEGATOR = >=,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
PROCEDURE = upg_catalog.int8lt,
LEFTARG = upg_catalog.int8,
RIGHTARG = upg_catalog.int8,
COMMUTATOR = >,
NEGATOR = >=,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
PROCEDURE = upg_catalog.interval_lt,
LEFTARG = upg_catalog."interval",
RIGHTARG = upg_catalog."interval",
COMMUTATOR = >,
NEGATOR = >=,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
PROCEDURE = upg_catalog.lseg_lt,
LEFTARG = upg_catalog.lseg,
RIGHTARG = upg_catalog.lseg,
COMMUTATOR = >,
NEGATOR = >=
);
CREATE OPERATOR upg_catalog.<(
PROCEDURE = upg_catalog.macaddr_lt,
LEFTARG = upg_catalog.macaddr,
RIGHTARG = upg_catalog.macaddr,
COMMUTATOR = >,
NEGATOR = >=,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
PROCEDURE = upg_catalog.namelt,
LEFTARG = upg_catalog.name,
RIGHTARG = upg_catalog.name,
COMMUTATOR = >,
NEGATOR = >=,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
PROCEDURE = upg_catalog.network_lt,
LEFTARG = upg_catalog.inet,
RIGHTARG = upg_catalog.inet,
COMMUTATOR = >,
NEGATOR = >=,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
PROCEDURE = upg_catalog.numeric_lt,
LEFTARG = upg_catalog."numeric",
RIGHTARG = upg_catalog."numeric",
COMMUTATOR = >,
NEGATOR = >=,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
PROCEDURE = upg_catalog.oidlt,
LEFTARG = upg_catalog.oid,
RIGHTARG = upg_catalog.oid,
COMMUTATOR = >,
NEGATOR = >=,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
PROCEDURE = upg_catalog.oidvectorlt,
LEFTARG = upg_catalog.oidvector,
RIGHTARG = upg_catalog.oidvector,
COMMUTATOR = >,
NEGATOR = >=,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
PROCEDURE = upg_catalog.path_n_lt,
LEFTARG = upg_catalog.path,
RIGHTARG = upg_catalog.path,
COMMUTATOR = >
);
CREATE OPERATOR upg_catalog.<(
PROCEDURE = upg_catalog.reltimelt,
LEFTARG = upg_catalog.reltime,
RIGHTARG = upg_catalog.reltime,
COMMUTATOR = >,
NEGATOR = >=,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
PROCEDURE = upg_catalog.text_lt,
LEFTARG = upg_catalog.text,
RIGHTARG = upg_catalog.text,
COMMUTATOR = >,
NEGATOR = >=,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
PROCEDURE = upg_catalog.tidlt,
LEFTARG = upg_catalog.tid,
RIGHTARG = upg_catalog.tid,
COMMUTATOR = >,
NEGATOR = >=,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
PROCEDURE = upg_catalog.time_lt,
LEFTARG = upg_catalog."time",
RIGHTARG = upg_catalog."time",
COMMUTATOR = >,
NEGATOR = >=,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
PROCEDURE = upg_catalog.timestamp_lt,
LEFTARG = upg_catalog."timestamp",
RIGHTARG = upg_catalog."timestamp",
COMMUTATOR = >,
NEGATOR = >=,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
PROCEDURE = upg_catalog.timestamp_lt_date,
LEFTARG = upg_catalog."timestamp",
RIGHTARG = upg_catalog.date,
COMMUTATOR = >,
NEGATOR = >=,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
PROCEDURE = upg_catalog.timestamp_lt_timestamptz,
LEFTARG = upg_catalog."timestamp",
RIGHTARG = upg_catalog.timestamptz,
COMMUTATOR = >,
NEGATOR = >=,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
PROCEDURE = upg_catalog.timestamptz_lt,
LEFTARG = upg_catalog.timestamptz,
RIGHTARG = upg_catalog.timestamptz,
COMMUTATOR = >,
NEGATOR = >=,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
PROCEDURE = upg_catalog.timestamptz_lt_date,
LEFTARG = upg_catalog.timestamptz,
RIGHTARG = upg_catalog.date,
COMMUTATOR = >,
NEGATOR = >=,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
PROCEDURE = upg_catalog.timestamptz_lt_timestamp,
LEFTARG = upg_catalog.timestamptz,
RIGHTARG = upg_catalog."timestamp",
COMMUTATOR = >,
NEGATOR = >=,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
PROCEDURE = upg_catalog.timetz_lt,
LEFTARG = upg_catalog.timetz,
RIGHTARG = upg_catalog.timetz,
COMMUTATOR = >,
NEGATOR = >=,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
PROCEDURE = upg_catalog.tintervallt,
LEFTARG = upg_catalog.tinterval,
RIGHTARG = upg_catalog.tinterval,
COMMUTATOR = >,
NEGATOR = >=,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<(
PROCEDURE = upg_catalog.varbitlt,
LEFTARG = upg_catalog.varbit,
RIGHTARG = upg_catalog.varbit,
COMMUTATOR = >,
NEGATOR = >=,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
-- Distance (<->) operators for geometric types. Same-type distance operators
-- declare themselves as their own commutator (distance is symmetric); the
-- mixed-type dist_* variants (circle/polygon, line/box, point/box, etc.)
-- declare no commutator, matching the built-in catalog entries. None define
-- selectivity estimators.
CREATE OPERATOR upg_catalog.<->(
	PROCEDURE = upg_catalog.box_distance,
	LEFTARG = upg_catalog.box,
	RIGHTARG = upg_catalog.box,
	COMMUTATOR = <->
);
CREATE OPERATOR upg_catalog.<->(
	PROCEDURE = upg_catalog.circle_distance,
	LEFTARG = upg_catalog.circle,
	RIGHTARG = upg_catalog.circle,
	COMMUTATOR = <->
);
CREATE OPERATOR upg_catalog.<->(
	PROCEDURE = upg_catalog.dist_cpoly,
	LEFTARG = upg_catalog.circle,
	RIGHTARG = upg_catalog.polygon
);
CREATE OPERATOR upg_catalog.<->(
	PROCEDURE = upg_catalog.dist_lb,
	LEFTARG = upg_catalog.line,
	RIGHTARG = upg_catalog.box
);
CREATE OPERATOR upg_catalog.<->(
	PROCEDURE = upg_catalog.dist_pb,
	LEFTARG = upg_catalog.point,
	RIGHTARG = upg_catalog.box
);
CREATE OPERATOR upg_catalog.<->(
	PROCEDURE = upg_catalog.dist_pc,
	LEFTARG = upg_catalog.point,
	RIGHTARG = upg_catalog.circle
);
CREATE OPERATOR upg_catalog.<->(
	PROCEDURE = upg_catalog.dist_pl,
	LEFTARG = upg_catalog.point,
	RIGHTARG = upg_catalog.line
);
CREATE OPERATOR upg_catalog.<->(
	PROCEDURE = upg_catalog.dist_ppath,
	LEFTARG = upg_catalog.point,
	RIGHTARG = upg_catalog.path
);
CREATE OPERATOR upg_catalog.<->(
	PROCEDURE = upg_catalog.dist_ps,
	LEFTARG = upg_catalog.point,
	RIGHTARG = upg_catalog.lseg
);
CREATE OPERATOR upg_catalog.<->(
	PROCEDURE = upg_catalog.dist_sb,
	LEFTARG = upg_catalog.lseg,
	RIGHTARG = upg_catalog.box
);
CREATE OPERATOR upg_catalog.<->(
	PROCEDURE = upg_catalog.dist_sl,
	LEFTARG = upg_catalog.lseg,
	RIGHTARG = upg_catalog.line
);
CREATE OPERATOR upg_catalog.<->(
	PROCEDURE = upg_catalog.line_distance,
	LEFTARG = upg_catalog.line,
	RIGHTARG = upg_catalog.line,
	COMMUTATOR = <->
);
CREATE OPERATOR upg_catalog.<->(
	PROCEDURE = upg_catalog.lseg_distance,
	LEFTARG = upg_catalog.lseg,
	RIGHTARG = upg_catalog.lseg,
	COMMUTATOR = <->
);
CREATE OPERATOR upg_catalog.<->(
	PROCEDURE = upg_catalog.path_distance,
	LEFTARG = upg_catalog.path,
	RIGHTARG = upg_catalog.path,
	COMMUTATOR = <->
);
CREATE OPERATOR upg_catalog.<->(
	PROCEDURE = upg_catalog.point_distance,
	LEFTARG = upg_catalog.point,
	RIGHTARG = upg_catalog.point,
	COMMUTATOR = <->
);
CREATE OPERATOR upg_catalog.<->(
	PROCEDURE = upg_catalog.poly_distance,
	LEFTARG = upg_catalog.polygon,
	RIGHTARG = upg_catalog.polygon,
	COMMUTATOR = <->
);
-- The << operator is overloaded across unrelated meanings, as in pg_catalog:
--   bit/int shifts (bitshiftleft, int2shl, int4shl, int8shl),
--   geometric "is strictly left of" (box/circle/point/poly, with the
--   position-based selectivity estimators),
--   inet "is contained by subnet" (network_sub, commutator >>),
--   and the legacy tinterval << (tintervalct).
CREATE OPERATOR upg_catalog.<<(
	PROCEDURE = upg_catalog.bitshiftleft,
	LEFTARG = upg_catalog."bit",
	RIGHTARG = upg_catalog.int4
);
CREATE OPERATOR upg_catalog.<<(
	PROCEDURE = upg_catalog.box_left,
	LEFTARG = upg_catalog.box,
	RIGHTARG = upg_catalog.box,
	RESTRICT = upg_catalog.positionsel,
	JOIN = upg_catalog.positionjoinsel
);
CREATE OPERATOR upg_catalog.<<(
	PROCEDURE = upg_catalog.circle_left,
	LEFTARG = upg_catalog.circle,
	RIGHTARG = upg_catalog.circle,
	RESTRICT = upg_catalog.positionsel,
	JOIN = upg_catalog.positionjoinsel
);
CREATE OPERATOR upg_catalog.<<(
	PROCEDURE = upg_catalog.int2shl,
	LEFTARG = upg_catalog.int2,
	RIGHTARG = upg_catalog.int4
);
CREATE OPERATOR upg_catalog.<<(
	PROCEDURE = upg_catalog.int4shl,
	LEFTARG = upg_catalog.int4,
	RIGHTARG = upg_catalog.int4
);
CREATE OPERATOR upg_catalog.<<(
	PROCEDURE = upg_catalog.int8shl,
	LEFTARG = upg_catalog.int8,
	RIGHTARG = upg_catalog.int4
);
CREATE OPERATOR upg_catalog.<<(
	PROCEDURE = upg_catalog.network_sub,
	LEFTARG = upg_catalog.inet,
	RIGHTARG = upg_catalog.inet,
	COMMUTATOR = >>
);
CREATE OPERATOR upg_catalog.<<(
	PROCEDURE = upg_catalog.point_left,
	LEFTARG = upg_catalog.point,
	RIGHTARG = upg_catalog.point,
	RESTRICT = upg_catalog.positionsel,
	JOIN = upg_catalog.positionjoinsel
);
CREATE OPERATOR upg_catalog.<<(
	PROCEDURE = upg_catalog.poly_left,
	LEFTARG = upg_catalog.polygon,
	RIGHTARG = upg_catalog.polygon,
	RESTRICT = upg_catalog.positionsel,
	JOIN = upg_catalog.positionjoinsel
);
-- NOTE: tinterval << maps to tintervalct here, mirroring the historical
-- built-in catalog entry for this deprecated type.
CREATE OPERATOR upg_catalog.<<(
	PROCEDURE = upg_catalog.tintervalct,
	LEFTARG = upg_catalog.tinterval,
	RIGHTARG = upg_catalog.tinterval
);
-- inet "is contained by or equals subnet"; commutator is >>= (contains or equals).
CREATE OPERATOR upg_catalog.<<=(
	PROCEDURE = upg_catalog.network_subeq,
	LEFTARG = upg_catalog.inet,
	RIGHTARG = upg_catalog.inet,
	COMMUTATOR = >>=
);
-- Geometric "is strictly below" (<<|) operators, with position-based
-- selectivity estimators; no commutator is declared, as in pg_catalog.
CREATE OPERATOR upg_catalog.<<|(
	PROCEDURE = upg_catalog.box_below,
	LEFTARG = upg_catalog.box,
	RIGHTARG = upg_catalog.box,
	RESTRICT = upg_catalog.positionsel,
	JOIN = upg_catalog.positionjoinsel
);
CREATE OPERATOR upg_catalog.<<|(
	PROCEDURE = upg_catalog.circle_below,
	LEFTARG = upg_catalog.circle,
	RIGHTARG = upg_catalog.circle,
	RESTRICT = upg_catalog.positionsel,
	JOIN = upg_catalog.positionjoinsel
);
CREATE OPERATOR upg_catalog.<<|(
	PROCEDURE = upg_catalog.poly_below,
	LEFTARG = upg_catalog.polygon,
	RIGHTARG = upg_catalog.polygon,
	RESTRICT = upg_catalog.positionsel,
	JOIN = upg_catalog.positionjoinsel
);
-- Less-than-or-equal (<=) operators, parallel to the < series above:
-- COMMUTATOR = >=, NEGATOR = >, and the scalarltsel/scalarltjoinsel
-- estimators (areasel/areajoinsel for box and circle). lseg and path again
-- omit estimators, matching their built-in catalog entries.
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.abstimele,
	LEFTARG = upg_catalog.abstime,
	RIGHTARG = upg_catalog.abstime,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.array_le,
	LEFTARG = upg_catalog.anyarray,
	RIGHTARG = upg_catalog.anyarray,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.bitle,
	LEFTARG = upg_catalog."bit",
	RIGHTARG = upg_catalog."bit",
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.boolle,
	LEFTARG = upg_catalog.bool,
	RIGHTARG = upg_catalog.bool,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.box_le,
	LEFTARG = upg_catalog.box,
	RIGHTARG = upg_catalog.box,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.areasel,
	JOIN = upg_catalog.areajoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.bpcharle,
	LEFTARG = upg_catalog.bpchar,
	RIGHTARG = upg_catalog.bpchar,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.byteale,
	LEFTARG = upg_catalog.bytea,
	RIGHTARG = upg_catalog.bytea,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.cash_le,
	LEFTARG = upg_catalog.money,
	RIGHTARG = upg_catalog.money,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.charle,
	LEFTARG = upg_catalog."char",
	RIGHTARG = upg_catalog."char",
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.circle_le,
	LEFTARG = upg_catalog.circle,
	RIGHTARG = upg_catalog.circle,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.areasel,
	JOIN = upg_catalog.areajoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.date_le,
	LEFTARG = upg_catalog.date,
	RIGHTARG = upg_catalog.date,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.date_le_timestamp,
	LEFTARG = upg_catalog.date,
	RIGHTARG = upg_catalog."timestamp",
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.date_le_timestamptz,
	LEFTARG = upg_catalog.date,
	RIGHTARG = upg_catalog.timestamptz,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.float48le,
	LEFTARG = upg_catalog.float4,
	RIGHTARG = upg_catalog.float8,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.float4le,
	LEFTARG = upg_catalog.float4,
	RIGHTARG = upg_catalog.float4,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.float84le,
	LEFTARG = upg_catalog.float8,
	RIGHTARG = upg_catalog.float4,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.float8le,
	LEFTARG = upg_catalog.float8,
	RIGHTARG = upg_catalog.float8,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.gpxloglocle,
	LEFTARG = upg_catalog.gpxlogloc,
	RIGHTARG = upg_catalog.gpxlogloc,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.int24le,
	LEFTARG = upg_catalog.int2,
	RIGHTARG = upg_catalog.int4,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.int28le,
	LEFTARG = upg_catalog.int2,
	RIGHTARG = upg_catalog.int8,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.int2le,
	LEFTARG = upg_catalog.int2,
	RIGHTARG = upg_catalog.int2,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.int42le,
	LEFTARG = upg_catalog.int4,
	RIGHTARG = upg_catalog.int2,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.int48le,
	LEFTARG = upg_catalog.int4,
	RIGHTARG = upg_catalog.int8,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.int4le,
	LEFTARG = upg_catalog.int4,
	RIGHTARG = upg_catalog.int4,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.int82le,
	LEFTARG = upg_catalog.int8,
	RIGHTARG = upg_catalog.int2,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.int84le,
	LEFTARG = upg_catalog.int8,
	RIGHTARG = upg_catalog.int4,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.int8le,
	LEFTARG = upg_catalog.int8,
	RIGHTARG = upg_catalog.int8,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.interval_le,
	LEFTARG = upg_catalog."interval",
	RIGHTARG = upg_catalog."interval",
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
-- NOTE: lseg <= defines no RESTRICT/JOIN estimators, as in the built-in catalog.
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.lseg_le,
	LEFTARG = upg_catalog.lseg,
	RIGHTARG = upg_catalog.lseg,
	COMMUTATOR = >=,
	NEGATOR = >
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.macaddr_le,
	LEFTARG = upg_catalog.macaddr,
	RIGHTARG = upg_catalog.macaddr,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.namele,
	LEFTARG = upg_catalog.name,
	RIGHTARG = upg_catalog.name,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.network_le,
	LEFTARG = upg_catalog.inet,
	RIGHTARG = upg_catalog.inet,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.numeric_le,
	LEFTARG = upg_catalog."numeric",
	RIGHTARG = upg_catalog."numeric",
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.oidle,
	LEFTARG = upg_catalog.oid,
	RIGHTARG = upg_catalog.oid,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.oidvectorle,
	LEFTARG = upg_catalog.oidvector,
	RIGHTARG = upg_catalog.oidvector,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
-- NOTE: path <= defines only a COMMUTATOR, as in the built-in catalog.
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.path_n_le,
	LEFTARG = upg_catalog.path,
	RIGHTARG = upg_catalog.path,
	COMMUTATOR = >=
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.reltimele,
	LEFTARG = upg_catalog.reltime,
	RIGHTARG = upg_catalog.reltime,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.text_le,
	LEFTARG = upg_catalog.text,
	RIGHTARG = upg_catalog.text,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.tidle,
	LEFTARG = upg_catalog.tid,
	RIGHTARG = upg_catalog.tid,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.time_le,
	LEFTARG = upg_catalog."time",
	RIGHTARG = upg_catalog."time",
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.timestamp_le,
	LEFTARG = upg_catalog."timestamp",
	RIGHTARG = upg_catalog."timestamp",
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.timestamp_le_date,
	LEFTARG = upg_catalog."timestamp",
	RIGHTARG = upg_catalog.date,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.timestamp_le_timestamptz,
	LEFTARG = upg_catalog."timestamp",
	RIGHTARG = upg_catalog.timestamptz,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.timestamptz_le,
	LEFTARG = upg_catalog.timestamptz,
	RIGHTARG = upg_catalog.timestamptz,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.timestamptz_le_date,
	LEFTARG = upg_catalog.timestamptz,
	RIGHTARG = upg_catalog.date,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.timestamptz_le_timestamp,
	LEFTARG = upg_catalog.timestamptz,
	RIGHTARG = upg_catalog."timestamp",
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.timetz_le,
	LEFTARG = upg_catalog.timetz,
	RIGHTARG = upg_catalog.timetz,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.tintervalle,
	LEFTARG = upg_catalog.tinterval,
	RIGHTARG = upg_catalog.tinterval,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.<=(
	PROCEDURE = upg_catalog.varbitle,
	LEFTARG = upg_catalog.varbit,
	RIGHTARG = upg_catalog.varbit,
	COMMUTATOR = >=,
	NEGATOR = >,
	RESTRICT = upg_catalog.scalarltsel,
	JOIN = upg_catalog.scalarltjoinsel
);
-- Not-equal (<>) operators: self-commutating, with NEGATOR = = (or ~=,
-- the "same as" operator, for point) and the neqsel/neqjoinsel estimators.
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.abstimene,
	LEFTARG = upg_catalog.abstime,
	RIGHTARG = upg_catalog.abstime,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.array_ne,
	LEFTARG = upg_catalog.anyarray,
	RIGHTARG = upg_catalog.anyarray,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.bitne,
	LEFTARG = upg_catalog."bit",
	RIGHTARG = upg_catalog."bit",
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.boolne,
	LEFTARG = upg_catalog.bool,
	RIGHTARG = upg_catalog.bool,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.bpcharne,
	LEFTARG = upg_catalog.bpchar,
	RIGHTARG = upg_catalog.bpchar,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.byteane,
	LEFTARG = upg_catalog.bytea,
	RIGHTARG = upg_catalog.bytea,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.cash_ne,
	LEFTARG = upg_catalog.money,
	RIGHTARG = upg_catalog.money,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.charne,
	LEFTARG = upg_catalog."char",
	RIGHTARG = upg_catalog."char",
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.circle_ne,
	LEFTARG = upg_catalog.circle,
	RIGHTARG = upg_catalog.circle,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.date_ne,
	LEFTARG = upg_catalog.date,
	RIGHTARG = upg_catalog.date,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.date_ne_timestamp,
	LEFTARG = upg_catalog.date,
	RIGHTARG = upg_catalog."timestamp",
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.date_ne_timestamptz,
	LEFTARG = upg_catalog.date,
	RIGHTARG = upg_catalog.timestamptz,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.float48ne,
	LEFTARG = upg_catalog.float4,
	RIGHTARG = upg_catalog.float8,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.float4ne,
	LEFTARG = upg_catalog.float4,
	RIGHTARG = upg_catalog.float4,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.float84ne,
	LEFTARG = upg_catalog.float8,
	RIGHTARG = upg_catalog.float4,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.float8ne,
	LEFTARG = upg_catalog.float8,
	RIGHTARG = upg_catalog.float8,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.gpxloglocne,
	LEFTARG = upg_catalog.gpxlogloc,
	RIGHTARG = upg_catalog.gpxlogloc,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.int24ne,
	LEFTARG = upg_catalog.int2,
	RIGHTARG = upg_catalog.int4,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.int28ne,
	LEFTARG = upg_catalog.int2,
	RIGHTARG = upg_catalog.int8,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.int2ne,
	LEFTARG = upg_catalog.int2,
	RIGHTARG = upg_catalog.int2,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.int42ne,
	LEFTARG = upg_catalog.int4,
	RIGHTARG = upg_catalog.int2,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.int48ne,
	LEFTARG = upg_catalog.int4,
	RIGHTARG = upg_catalog.int8,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.int4ne,
	LEFTARG = upg_catalog.int4,
	RIGHTARG = upg_catalog.int4,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.int82ne,
	LEFTARG = upg_catalog.int8,
	RIGHTARG = upg_catalog.int2,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.int84ne,
	LEFTARG = upg_catalog.int8,
	RIGHTARG = upg_catalog.int4,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.int8ne,
	LEFTARG = upg_catalog.int8,
	RIGHTARG = upg_catalog.int8,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.interval_ne,
	LEFTARG = upg_catalog."interval",
	RIGHTARG = upg_catalog."interval",
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.lseg_ne,
	LEFTARG = upg_catalog.lseg,
	RIGHTARG = upg_catalog.lseg,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.macaddr_ne,
	LEFTARG = upg_catalog.macaddr,
	RIGHTARG = upg_catalog.macaddr,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.namene,
	LEFTARG = upg_catalog.name,
	RIGHTARG = upg_catalog.name,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.network_ne,
	LEFTARG = upg_catalog.inet,
	RIGHTARG = upg_catalog.inet,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.numeric_ne,
	LEFTARG = upg_catalog."numeric",
	RIGHTARG = upg_catalog."numeric",
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.oidne,
	LEFTARG = upg_catalog.oid,
	RIGHTARG = upg_catalog.oid,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.oidvectorne,
	LEFTARG = upg_catalog.oidvector,
	RIGHTARG = upg_catalog.oidvector,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
-- NOTE: point has no plain = operator; its negator is ~= ("same as"),
-- matching the built-in catalog entry.
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.point_ne,
	LEFTARG = upg_catalog.point,
	RIGHTARG = upg_catalog.point,
	COMMUTATOR = <>,
	NEGATOR = ~=,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.reltimene,
	LEFTARG = upg_catalog.reltime,
	RIGHTARG = upg_catalog.reltime,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.textne,
	LEFTARG = upg_catalog.text,
	RIGHTARG = upg_catalog.text,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.tidne,
	LEFTARG = upg_catalog.tid,
	RIGHTARG = upg_catalog.tid,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.time_ne,
	LEFTARG = upg_catalog."time",
	RIGHTARG = upg_catalog."time",
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.timestamp_ne,
	LEFTARG = upg_catalog."timestamp",
	RIGHTARG = upg_catalog."timestamp",
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.timestamp_ne_date,
	LEFTARG = upg_catalog."timestamp",
	RIGHTARG = upg_catalog.date,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.timestamp_ne_timestamptz,
	LEFTARG = upg_catalog."timestamp",
	RIGHTARG = upg_catalog.timestamptz,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.timestamptz_ne,
	LEFTARG = upg_catalog.timestamptz,
	RIGHTARG = upg_catalog.timestamptz,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.timestamptz_ne_date,
	LEFTARG = upg_catalog.timestamptz,
	RIGHTARG = upg_catalog.date,
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
	PROCEDURE = upg_catalog.timestamptz_ne_timestamp,
	LEFTARG = upg_catalog.timestamptz,
	RIGHTARG = upg_catalog."timestamp",
	COMMUTATOR = <>,
	NEGATOR = =,
	RESTRICT = upg_catalog.neqsel,
	JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
PROCEDURE = upg_catalog.timetz_ne,
LEFTARG = upg_catalog.timetz,
RIGHTARG = upg_catalog.timetz,
COMMUTATOR = <>,
NEGATOR = =,
RESTRICT = upg_catalog.neqsel,
JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
PROCEDURE = upg_catalog.tintervalne,
LEFTARG = upg_catalog.tinterval,
RIGHTARG = upg_catalog.tinterval,
COMMUTATOR = <>,
NEGATOR = =,
RESTRICT = upg_catalog.neqsel,
JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.<>(
PROCEDURE = upg_catalog.varbitne,
LEFTARG = upg_catalog.varbit,
RIGHTARG = upg_catalog.varbit,
COMMUTATOR = <>,
NEGATOR = =,
RESTRICT = upg_catalog.neqsel,
JOIN = upg_catalog.neqjoinsel
);
-- "<?>" operator: abstime-contained-in-tinterval test (intinterval).
-- No commutator/negator or selectivity estimators are declared for it.
CREATE OPERATOR upg_catalog.<?>(
PROCEDURE = upg_catalog.intinterval,
LEFTARG = upg_catalog.abstime,
RIGHTARG = upg_catalog.tinterval
);
-- "<@" (contained-by / is-on) operators. The container variants
-- (arraycontained, box_contained, circle_contained, poly_contained)
-- declare COMMUTATOR @> plus contsel/contjoinsel estimators; the
-- geometric on_* point/segment tests declare fewer or no options.
CREATE OPERATOR upg_catalog.<@(
PROCEDURE = upg_catalog.arraycontained,
LEFTARG = upg_catalog.anyarray,
RIGHTARG = upg_catalog.anyarray,
COMMUTATOR = @>,
RESTRICT = upg_catalog.contsel,
JOIN = upg_catalog.contjoinsel
);
CREATE OPERATOR upg_catalog.<@(
PROCEDURE = upg_catalog.box_contained,
LEFTARG = upg_catalog.box,
RIGHTARG = upg_catalog.box,
COMMUTATOR = @>,
RESTRICT = upg_catalog.contsel,
JOIN = upg_catalog.contjoinsel
);
CREATE OPERATOR upg_catalog.<@(
PROCEDURE = upg_catalog.circle_contained,
LEFTARG = upg_catalog.circle,
RIGHTARG = upg_catalog.circle,
COMMUTATOR = @>,
RESTRICT = upg_catalog.contsel,
JOIN = upg_catalog.contjoinsel
);
CREATE OPERATOR upg_catalog.<@(
PROCEDURE = upg_catalog.on_pb,
LEFTARG = upg_catalog.point,
RIGHTARG = upg_catalog.box
);
CREATE OPERATOR upg_catalog.<@(
PROCEDURE = upg_catalog.on_pl,
LEFTARG = upg_catalog.point,
RIGHTARG = upg_catalog.line
);
CREATE OPERATOR upg_catalog.<@(
PROCEDURE = upg_catalog.on_ppath,
LEFTARG = upg_catalog.point,
RIGHTARG = upg_catalog.path,
COMMUTATOR = @>
);
CREATE OPERATOR upg_catalog.<@(
PROCEDURE = upg_catalog.on_ps,
LEFTARG = upg_catalog.point,
RIGHTARG = upg_catalog.lseg
);
CREATE OPERATOR upg_catalog.<@(
PROCEDURE = upg_catalog.on_sb,
LEFTARG = upg_catalog.lseg,
RIGHTARG = upg_catalog.box
);
CREATE OPERATOR upg_catalog.<@(
PROCEDURE = upg_catalog.on_sl,
LEFTARG = upg_catalog.lseg,
RIGHTARG = upg_catalog.line
);
CREATE OPERATOR upg_catalog.<@(
PROCEDURE = upg_catalog.poly_contained,
LEFTARG = upg_catalog.polygon,
RIGHTARG = upg_catalog.polygon,
COMMUTATOR = @>,
RESTRICT = upg_catalog.contsel,
JOIN = upg_catalog.contjoinsel
);
CREATE OPERATOR upg_catalog.<@(
PROCEDURE = upg_catalog.pt_contained_circle,
LEFTARG = upg_catalog.point,
RIGHTARG = upg_catalog.circle,
COMMUTATOR = @>
);
CREATE OPERATOR upg_catalog.<@(
PROCEDURE = upg_catalog.pt_contained_poly,
LEFTARG = upg_catalog.point,
RIGHTARG = upg_catalog.polygon,
COMMUTATOR = @>
);
-- "<^" (is-below) geometric operators for box and point, using the
-- positionsel/positionjoinsel estimators. No commutator is declared.
CREATE OPERATOR upg_catalog.<^(
PROCEDURE = upg_catalog.box_below_eq,
LEFTARG = upg_catalog.box,
RIGHTARG = upg_catalog.box,
RESTRICT = upg_catalog.positionsel,
JOIN = upg_catalog.positionjoinsel
);
CREATE OPERATOR upg_catalog.<^(
PROCEDURE = upg_catalog.point_below,
LEFTARG = upg_catalog.point,
RIGHTARG = upg_catalog.point,
RESTRICT = upg_catalog.positionsel,
JOIN = upg_catalog.positionjoinsel
);
-- "=" (equality) operators. All use eqsel/eqjoinsel. The extra options
-- vary per type and are intentional:
--   HASHES                      -> type supports hash joins
--   SORT1/SORT2/LTCMP/GTCMP     -> type supports merge joins
--   no NEGATOR                  -> the type defines no "<>" operator
--     (e.g. aclitem, cid, int2vector, xid, box, line, path)
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.abstimeeq,
LEFTARG = upg_catalog.abstime,
RIGHTARG = upg_catalog.abstime,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.aclitemeq,
LEFTARG = upg_catalog.aclitem,
RIGHTARG = upg_catalog.aclitem,
COMMUTATOR = =,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.array_eq,
LEFTARG = upg_catalog.anyarray,
RIGHTARG = upg_catalog.anyarray,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.biteq,
LEFTARG = upg_catalog."bit",
RIGHTARG = upg_catalog."bit",
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.booleq,
LEFTARG = upg_catalog.bool,
RIGHTARG = upg_catalog.bool,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.box_eq,
LEFTARG = upg_catalog.box,
RIGHTARG = upg_catalog.box,
COMMUTATOR = =,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.bpchareq,
LEFTARG = upg_catalog.bpchar,
RIGHTARG = upg_catalog.bpchar,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.byteaeq,
LEFTARG = upg_catalog.bytea,
RIGHTARG = upg_catalog.bytea,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.cash_eq,
LEFTARG = upg_catalog.money,
RIGHTARG = upg_catalog.money,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.chareq,
LEFTARG = upg_catalog."char",
RIGHTARG = upg_catalog."char",
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.cideq,
LEFTARG = upg_catalog.cid,
RIGHTARG = upg_catalog.cid,
COMMUTATOR = =,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.circle_eq,
LEFTARG = upg_catalog.circle,
RIGHTARG = upg_catalog.circle,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.date_eq,
LEFTARG = upg_catalog.date,
RIGHTARG = upg_catalog.date,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.date_eq_timestamp,
LEFTARG = upg_catalog.date,
RIGHTARG = upg_catalog."timestamp",
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.date_eq_timestamptz,
LEFTARG = upg_catalog.date,
RIGHTARG = upg_catalog.timestamptz,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.float48eq,
LEFTARG = upg_catalog.float4,
RIGHTARG = upg_catalog.float8,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.float4eq,
LEFTARG = upg_catalog.float4,
RIGHTARG = upg_catalog.float4,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.float84eq,
LEFTARG = upg_catalog.float8,
RIGHTARG = upg_catalog.float4,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.float8eq,
LEFTARG = upg_catalog.float8,
RIGHTARG = upg_catalog.float8,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
-- gpxlogloc is a Greenplum-specific type (xlog location).
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.gpxlogloceq,
LEFTARG = upg_catalog.gpxlogloc,
RIGHTARG = upg_catalog.gpxlogloc,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.int24eq,
LEFTARG = upg_catalog.int2,
RIGHTARG = upg_catalog.int4,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.int28eq,
LEFTARG = upg_catalog.int2,
RIGHTARG = upg_catalog.int8,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.int2eq,
LEFTARG = upg_catalog.int2,
RIGHTARG = upg_catalog.int2,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.int2vectoreq,
LEFTARG = upg_catalog.int2vector,
RIGHTARG = upg_catalog.int2vector,
COMMUTATOR = =,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.int42eq,
LEFTARG = upg_catalog.int4,
RIGHTARG = upg_catalog.int2,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.int48eq,
LEFTARG = upg_catalog.int4,
RIGHTARG = upg_catalog.int8,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.int4eq,
LEFTARG = upg_catalog.int4,
RIGHTARG = upg_catalog.int4,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.int82eq,
LEFTARG = upg_catalog.int8,
RIGHTARG = upg_catalog.int2,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.int84eq,
LEFTARG = upg_catalog.int8,
RIGHTARG = upg_catalog.int4,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.int8eq,
LEFTARG = upg_catalog.int8,
RIGHTARG = upg_catalog.int8,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.interval_eq,
LEFTARG = upg_catalog."interval",
RIGHTARG = upg_catalog."interval",
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.line_eq,
LEFTARG = upg_catalog.line,
RIGHTARG = upg_catalog.line,
COMMUTATOR = =,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.lseg_eq,
LEFTARG = upg_catalog.lseg,
RIGHTARG = upg_catalog.lseg,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.macaddr_eq,
LEFTARG = upg_catalog.macaddr,
RIGHTARG = upg_catalog.macaddr,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.nameeq,
LEFTARG = upg_catalog.name,
RIGHTARG = upg_catalog.name,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.network_eq,
LEFTARG = upg_catalog.inet,
RIGHTARG = upg_catalog.inet,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.numeric_eq,
LEFTARG = upg_catalog."numeric",
RIGHTARG = upg_catalog."numeric",
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.oideq,
LEFTARG = upg_catalog.oid,
RIGHTARG = upg_catalog.oid,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.oidvectoreq,
LEFTARG = upg_catalog.oidvector,
RIGHTARG = upg_catalog.oidvector,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.path_n_eq,
LEFTARG = upg_catalog.path,
RIGHTARG = upg_catalog.path,
COMMUTATOR = =,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.reltimeeq,
LEFTARG = upg_catalog.reltime,
RIGHTARG = upg_catalog.reltime,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.texteq,
LEFTARG = upg_catalog.text,
RIGHTARG = upg_catalog.text,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.tideq,
LEFTARG = upg_catalog.tid,
RIGHTARG = upg_catalog.tid,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.time_eq,
LEFTARG = upg_catalog."time",
RIGHTARG = upg_catalog."time",
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.timestamp_eq,
LEFTARG = upg_catalog."timestamp",
RIGHTARG = upg_catalog."timestamp",
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.timestamp_eq_date,
LEFTARG = upg_catalog."timestamp",
RIGHTARG = upg_catalog.date,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.timestamp_eq_timestamptz,
LEFTARG = upg_catalog."timestamp",
RIGHTARG = upg_catalog.timestamptz,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.timestamptz_eq,
LEFTARG = upg_catalog.timestamptz,
RIGHTARG = upg_catalog.timestamptz,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.timestamptz_eq_date,
LEFTARG = upg_catalog.timestamptz,
RIGHTARG = upg_catalog.date,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.timestamptz_eq_timestamp,
LEFTARG = upg_catalog.timestamptz,
RIGHTARG = upg_catalog."timestamp",
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.timetz_eq,
LEFTARG = upg_catalog.timetz,
RIGHTARG = upg_catalog.timetz,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.tintervaleq,
LEFTARG = upg_catalog.tinterval,
RIGHTARG = upg_catalog.tinterval,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.varbiteq,
LEFTARG = upg_catalog.varbit,
RIGHTARG = upg_catalog.varbit,
COMMUTATOR = =,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
SORT1 = <,
SORT2 = <,
LTCMP = <,
GTCMP = >
);
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.xideq,
LEFTARG = upg_catalog.xid,
RIGHTARG = upg_catalog.xid,
COMMUTATOR = =,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES
);
-- Cross-type xid = int4 comparison; no commutator is declared since
-- there is no matching int4 = xid operator.
CREATE OPERATOR upg_catalog.=(
PROCEDURE = upg_catalog.xideqint4,
LEFTARG = upg_catalog.xid,
RIGHTARG = upg_catalog.int4,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel
);
-- ">" (greater-than) operators. Scalar types use the
-- scalargtsel/scalargtjoinsel estimators; the geometric box/circle
-- comparisons (area-based) use areasel/areajoinsel instead, and
-- lseg_gt/path_n_gt declare no estimators at all.
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.abstimegt,
LEFTARG = upg_catalog.abstime,
RIGHTARG = upg_catalog.abstime,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.array_gt,
LEFTARG = upg_catalog.anyarray,
RIGHTARG = upg_catalog.anyarray,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.bitgt,
LEFTARG = upg_catalog."bit",
RIGHTARG = upg_catalog."bit",
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.boolgt,
LEFTARG = upg_catalog.bool,
RIGHTARG = upg_catalog.bool,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
-- box ordering compares by area, hence the area estimators.
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.box_gt,
LEFTARG = upg_catalog.box,
RIGHTARG = upg_catalog.box,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.areasel,
JOIN = upg_catalog.areajoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.bpchargt,
LEFTARG = upg_catalog.bpchar,
RIGHTARG = upg_catalog.bpchar,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.byteagt,
LEFTARG = upg_catalog.bytea,
RIGHTARG = upg_catalog.bytea,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.cash_gt,
LEFTARG = upg_catalog.money,
RIGHTARG = upg_catalog.money,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.chargt,
LEFTARG = upg_catalog."char",
RIGHTARG = upg_catalog."char",
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.circle_gt,
LEFTARG = upg_catalog.circle,
RIGHTARG = upg_catalog.circle,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.areasel,
JOIN = upg_catalog.areajoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.date_gt,
LEFTARG = upg_catalog.date,
RIGHTARG = upg_catalog.date,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.date_gt_timestamp,
LEFTARG = upg_catalog.date,
RIGHTARG = upg_catalog."timestamp",
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.date_gt_timestamptz,
LEFTARG = upg_catalog.date,
RIGHTARG = upg_catalog.timestamptz,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.float48gt,
LEFTARG = upg_catalog.float4,
RIGHTARG = upg_catalog.float8,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.float4gt,
LEFTARG = upg_catalog.float4,
RIGHTARG = upg_catalog.float4,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.float84gt,
LEFTARG = upg_catalog.float8,
RIGHTARG = upg_catalog.float4,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.float8gt,
LEFTARG = upg_catalog.float8,
RIGHTARG = upg_catalog.float8,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.gpxloglocgt,
LEFTARG = upg_catalog.gpxlogloc,
RIGHTARG = upg_catalog.gpxlogloc,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.int24gt,
LEFTARG = upg_catalog.int2,
RIGHTARG = upg_catalog.int4,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.int28gt,
LEFTARG = upg_catalog.int2,
RIGHTARG = upg_catalog.int8,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.int2gt,
LEFTARG = upg_catalog.int2,
RIGHTARG = upg_catalog.int2,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.int42gt,
LEFTARG = upg_catalog.int4,
RIGHTARG = upg_catalog.int2,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.int48gt,
LEFTARG = upg_catalog.int4,
RIGHTARG = upg_catalog.int8,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.int4gt,
LEFTARG = upg_catalog.int4,
RIGHTARG = upg_catalog.int4,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.int82gt,
LEFTARG = upg_catalog.int8,
RIGHTARG = upg_catalog.int2,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.int84gt,
LEFTARG = upg_catalog.int8,
RIGHTARG = upg_catalog.int4,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.int8gt,
LEFTARG = upg_catalog.int8,
RIGHTARG = upg_catalog.int8,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.interval_gt,
LEFTARG = upg_catalog."interval",
RIGHTARG = upg_catalog."interval",
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.lseg_gt,
LEFTARG = upg_catalog.lseg,
RIGHTARG = upg_catalog.lseg,
COMMUTATOR = <,
NEGATOR = <=
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.macaddr_gt,
LEFTARG = upg_catalog.macaddr,
RIGHTARG = upg_catalog.macaddr,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.namegt,
LEFTARG = upg_catalog.name,
RIGHTARG = upg_catalog.name,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.network_gt,
LEFTARG = upg_catalog.inet,
RIGHTARG = upg_catalog.inet,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.numeric_gt,
LEFTARG = upg_catalog."numeric",
RIGHTARG = upg_catalog."numeric",
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.oidgt,
LEFTARG = upg_catalog.oid,
RIGHTARG = upg_catalog.oid,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.oidvectorgt,
LEFTARG = upg_catalog.oidvector,
RIGHTARG = upg_catalog.oidvector,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
-- path ordering declares only a commutator (no negator/estimators).
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.path_n_gt,
LEFTARG = upg_catalog.path,
RIGHTARG = upg_catalog.path,
COMMUTATOR = <
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.reltimegt,
LEFTARG = upg_catalog.reltime,
RIGHTARG = upg_catalog.reltime,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.text_gt,
LEFTARG = upg_catalog.text,
RIGHTARG = upg_catalog.text,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.tidgt,
LEFTARG = upg_catalog.tid,
RIGHTARG = upg_catalog.tid,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.time_gt,
LEFTARG = upg_catalog."time",
RIGHTARG = upg_catalog."time",
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.timestamp_gt,
LEFTARG = upg_catalog."timestamp",
RIGHTARG = upg_catalog."timestamp",
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.timestamp_gt_date,
LEFTARG = upg_catalog."timestamp",
RIGHTARG = upg_catalog.date,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.timestamp_gt_timestamptz,
LEFTARG = upg_catalog."timestamp",
RIGHTARG = upg_catalog.timestamptz,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.timestamptz_gt,
LEFTARG = upg_catalog.timestamptz,
RIGHTARG = upg_catalog.timestamptz,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.timestamptz_gt_date,
LEFTARG = upg_catalog.timestamptz,
RIGHTARG = upg_catalog.date,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.timestamptz_gt_timestamp,
LEFTARG = upg_catalog.timestamptz,
RIGHTARG = upg_catalog."timestamp",
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.timetz_gt,
LEFTARG = upg_catalog.timetz,
RIGHTARG = upg_catalog.timetz,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.tintervalgt,
LEFTARG = upg_catalog.tinterval,
RIGHTARG = upg_catalog.tinterval,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>(
PROCEDURE = upg_catalog.varbitgt,
LEFTARG = upg_catalog.varbit,
RIGHTARG = upg_catalog.varbit,
COMMUTATOR = <,
NEGATOR = <=,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
-- ">=" (greater-than-or-equal) operators: COMMUTATOR <=, NEGATOR <.
-- Scalar types use scalargtsel/scalargtjoinsel; the geometric
-- box/circle comparisons use areasel/areajoinsel.
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.abstimege,
LEFTARG = upg_catalog.abstime,
RIGHTARG = upg_catalog.abstime,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.array_ge,
LEFTARG = upg_catalog.anyarray,
RIGHTARG = upg_catalog.anyarray,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.bitge,
LEFTARG = upg_catalog."bit",
RIGHTARG = upg_catalog."bit",
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.boolge,
LEFTARG = upg_catalog.bool,
RIGHTARG = upg_catalog.bool,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.box_ge,
LEFTARG = upg_catalog.box,
RIGHTARG = upg_catalog.box,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.areasel,
JOIN = upg_catalog.areajoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.bpcharge,
LEFTARG = upg_catalog.bpchar,
RIGHTARG = upg_catalog.bpchar,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.byteage,
LEFTARG = upg_catalog.bytea,
RIGHTARG = upg_catalog.bytea,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.cash_ge,
LEFTARG = upg_catalog.money,
RIGHTARG = upg_catalog.money,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.charge,
LEFTARG = upg_catalog."char",
RIGHTARG = upg_catalog."char",
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.circle_ge,
LEFTARG = upg_catalog.circle,
RIGHTARG = upg_catalog.circle,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.areasel,
JOIN = upg_catalog.areajoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.date_ge,
LEFTARG = upg_catalog.date,
RIGHTARG = upg_catalog.date,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.date_ge_timestamp,
LEFTARG = upg_catalog.date,
RIGHTARG = upg_catalog."timestamp",
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.date_ge_timestamptz,
LEFTARG = upg_catalog.date,
RIGHTARG = upg_catalog.timestamptz,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.float48ge,
LEFTARG = upg_catalog.float4,
RIGHTARG = upg_catalog.float8,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.float4ge,
LEFTARG = upg_catalog.float4,
RIGHTARG = upg_catalog.float4,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.float84ge,
LEFTARG = upg_catalog.float8,
RIGHTARG = upg_catalog.float4,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.float8ge,
LEFTARG = upg_catalog.float8,
RIGHTARG = upg_catalog.float8,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.gpxloglocge,
LEFTARG = upg_catalog.gpxlogloc,
RIGHTARG = upg_catalog.gpxlogloc,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.int24ge,
LEFTARG = upg_catalog.int2,
RIGHTARG = upg_catalog.int4,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.int28ge,
LEFTARG = upg_catalog.int2,
RIGHTARG = upg_catalog.int8,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.int2ge,
LEFTARG = upg_catalog.int2,
RIGHTARG = upg_catalog.int2,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.int42ge,
LEFTARG = upg_catalog.int4,
RIGHTARG = upg_catalog.int2,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.int48ge,
LEFTARG = upg_catalog.int4,
RIGHTARG = upg_catalog.int8,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.int4ge,
LEFTARG = upg_catalog.int4,
RIGHTARG = upg_catalog.int4,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.int82ge,
LEFTARG = upg_catalog.int8,
RIGHTARG = upg_catalog.int2,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.int84ge,
LEFTARG = upg_catalog.int8,
RIGHTARG = upg_catalog.int4,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.int8ge,
LEFTARG = upg_catalog.int8,
RIGHTARG = upg_catalog.int8,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.interval_ge,
LEFTARG = upg_catalog."interval",
RIGHTARG = upg_catalog."interval",
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.lseg_ge,
LEFTARG = upg_catalog.lseg,
RIGHTARG = upg_catalog.lseg,
COMMUTATOR = <=,
NEGATOR = <
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.macaddr_ge,
LEFTARG = upg_catalog.macaddr,
RIGHTARG = upg_catalog.macaddr,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.namege,
LEFTARG = upg_catalog.name,
RIGHTARG = upg_catalog.name,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.network_ge,
LEFTARG = upg_catalog.inet,
RIGHTARG = upg_catalog.inet,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.numeric_ge,
LEFTARG = upg_catalog."numeric",
RIGHTARG = upg_catalog."numeric",
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.oidge,
LEFTARG = upg_catalog.oid,
RIGHTARG = upg_catalog.oid,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.oidvectorge,
LEFTARG = upg_catalog.oidvector,
RIGHTARG = upg_catalog.oidvector,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.path_n_ge,
LEFTARG = upg_catalog.path,
RIGHTARG = upg_catalog.path,
COMMUTATOR = <=
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.reltimege,
LEFTARG = upg_catalog.reltime,
RIGHTARG = upg_catalog.reltime,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.text_ge,
LEFTARG = upg_catalog.text,
RIGHTARG = upg_catalog.text,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.tidge,
LEFTARG = upg_catalog.tid,
RIGHTARG = upg_catalog.tid,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.time_ge,
LEFTARG = upg_catalog."time",
RIGHTARG = upg_catalog."time",
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.timestamp_ge,
LEFTARG = upg_catalog."timestamp",
RIGHTARG = upg_catalog."timestamp",
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.timestamp_ge_date,
LEFTARG = upg_catalog."timestamp",
RIGHTARG = upg_catalog.date,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.timestamp_ge_timestamptz,
LEFTARG = upg_catalog."timestamp",
RIGHTARG = upg_catalog.timestamptz,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.timestamptz_ge,
LEFTARG = upg_catalog.timestamptz,
RIGHTARG = upg_catalog.timestamptz,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.timestamptz_ge_date,
LEFTARG = upg_catalog.timestamptz,
RIGHTARG = upg_catalog.date,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.timestamptz_ge_timestamp,
LEFTARG = upg_catalog.timestamptz,
RIGHTARG = upg_catalog."timestamp",
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.timetz_ge,
LEFTARG = upg_catalog.timetz,
RIGHTARG = upg_catalog.timetz,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.tintervalge,
LEFTARG = upg_catalog.tinterval,
RIGHTARG = upg_catalog.tinterval,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.>=(
PROCEDURE = upg_catalog.varbitge,
LEFTARG = upg_catalog.varbit,
RIGHTARG = upg_catalog.varbit,
COMMUTATOR = <=,
NEGATOR = <,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
-- ">>" operators. This symbol is overloaded: bitwise shift-right for
-- bit/int2/int4/int8, "strictly right of" for geometric types (box, circle,
-- point, polygon, with position-based estimators), and "contains subnet"
-- for inet. ">>=" is inet "contains or equals".
CREATE OPERATOR upg_catalog.>>(
PROCEDURE = upg_catalog.bitshiftright,
LEFTARG = upg_catalog."bit",
RIGHTARG = upg_catalog.int4
);
CREATE OPERATOR upg_catalog.>>(
PROCEDURE = upg_catalog.box_right,
LEFTARG = upg_catalog.box,
RIGHTARG = upg_catalog.box,
RESTRICT = upg_catalog.positionsel,
JOIN = upg_catalog.positionjoinsel
);
CREATE OPERATOR upg_catalog.>>(
PROCEDURE = upg_catalog.circle_right,
LEFTARG = upg_catalog.circle,
RIGHTARG = upg_catalog.circle,
RESTRICT = upg_catalog.positionsel,
JOIN = upg_catalog.positionjoinsel
);
CREATE OPERATOR upg_catalog.>>(
PROCEDURE = upg_catalog.int2shr,
LEFTARG = upg_catalog.int2,
RIGHTARG = upg_catalog.int4
);
CREATE OPERATOR upg_catalog.>>(
PROCEDURE = upg_catalog.int4shr,
LEFTARG = upg_catalog.int4,
RIGHTARG = upg_catalog.int4
);
CREATE OPERATOR upg_catalog.>>(
PROCEDURE = upg_catalog.int8shr,
LEFTARG = upg_catalog.int8,
RIGHTARG = upg_catalog.int4
);
-- inet ">>" (supernet contains) commutes with "<<" (is contained by).
CREATE OPERATOR upg_catalog.>>(
PROCEDURE = upg_catalog.network_sup,
LEFTARG = upg_catalog.inet,
RIGHTARG = upg_catalog.inet,
COMMUTATOR = <<
);
CREATE OPERATOR upg_catalog.>>(
PROCEDURE = upg_catalog.point_right,
LEFTARG = upg_catalog.point,
RIGHTARG = upg_catalog.point,
RESTRICT = upg_catalog.positionsel,
JOIN = upg_catalog.positionjoinsel
);
CREATE OPERATOR upg_catalog.>>(
PROCEDURE = upg_catalog.poly_right,
LEFTARG = upg_catalog.polygon,
RIGHTARG = upg_catalog.polygon,
RESTRICT = upg_catalog.positionsel,
JOIN = upg_catalog.positionjoinsel
);
CREATE OPERATOR upg_catalog.>>=(
PROCEDURE = upg_catalog.network_supeq,
LEFTARG = upg_catalog.inet,
RIGHTARG = upg_catalog.inet,
COMMUTATOR = <<=
);
-- ">^" geometric operators: "is above (allows touching)" for box, and
-- "is above" for point; both use position-based selectivity estimators.
CREATE OPERATOR upg_catalog.>^(
PROCEDURE = upg_catalog.box_above_eq,
LEFTARG = upg_catalog.box,
RIGHTARG = upg_catalog.box,
RESTRICT = upg_catalog.positionsel,
JOIN = upg_catalog.positionjoinsel
);
CREATE OPERATOR upg_catalog.>^(
PROCEDURE = upg_catalog.point_above,
LEFTARG = upg_catalog.point,
RIGHTARG = upg_catalog.point,
RESTRICT = upg_catalog.positionsel,
JOIN = upg_catalog.positionjoinsel
);
-- "?#" geometric intersection operators: overlap test for boxes, and
-- "intersects?" for line/lseg/box/path combinations. The symmetric
-- same-type variants (line?#line, lseg?#lseg) are their own commutators.
CREATE OPERATOR upg_catalog.?#(
PROCEDURE = upg_catalog.box_overlap,
LEFTARG = upg_catalog.box,
RIGHTARG = upg_catalog.box,
RESTRICT = upg_catalog.areasel,
JOIN = upg_catalog.areajoinsel
);
CREATE OPERATOR upg_catalog.?#(
PROCEDURE = upg_catalog.inter_lb,
LEFTARG = upg_catalog.line,
RIGHTARG = upg_catalog.box
);
CREATE OPERATOR upg_catalog.?#(
PROCEDURE = upg_catalog.inter_sb,
LEFTARG = upg_catalog.lseg,
RIGHTARG = upg_catalog.box
);
CREATE OPERATOR upg_catalog.?#(
PROCEDURE = upg_catalog.inter_sl,
LEFTARG = upg_catalog.lseg,
RIGHTARG = upg_catalog.line
);
CREATE OPERATOR upg_catalog.?#(
PROCEDURE = upg_catalog.line_intersect,
LEFTARG = upg_catalog.line,
RIGHTARG = upg_catalog.line,
COMMUTATOR = ?#
);
CREATE OPERATOR upg_catalog.?#(
PROCEDURE = upg_catalog.lseg_intersect,
LEFTARG = upg_catalog.lseg,
RIGHTARG = upg_catalog.lseg,
COMMUTATOR = ?#
);
CREATE OPERATOR upg_catalog.?#(
PROCEDURE = upg_catalog.path_inter,
LEFTARG = upg_catalog.path,
RIGHTARG = upg_catalog.path
);
-- Geometric orientation operators:
--   "?-"  horizontal? (prefix for line/lseg; binary "horizontally aligned"
--         for two points)
--   "?-|" perpendicular?   "?|" vertical?   "?||" parallel?
-- The binary same-type variants are their own commutators.
CREATE OPERATOR upg_catalog.?-(
PROCEDURE = upg_catalog.line_horizontal,
RIGHTARG = upg_catalog.line
);
CREATE OPERATOR upg_catalog.?-(
PROCEDURE = upg_catalog.lseg_horizontal,
RIGHTARG = upg_catalog.lseg
);
CREATE OPERATOR upg_catalog.?-(
PROCEDURE = upg_catalog.point_horiz,
LEFTARG = upg_catalog.point,
RIGHTARG = upg_catalog.point,
COMMUTATOR = ?-
);
CREATE OPERATOR upg_catalog.?-|(
PROCEDURE = upg_catalog.line_perp,
LEFTARG = upg_catalog.line,
RIGHTARG = upg_catalog.line,
COMMUTATOR = ?-|
);
CREATE OPERATOR upg_catalog.?-|(
PROCEDURE = upg_catalog.lseg_perp,
LEFTARG = upg_catalog.lseg,
RIGHTARG = upg_catalog.lseg,
COMMUTATOR = ?-|
);
CREATE OPERATOR upg_catalog.?|(
PROCEDURE = upg_catalog.line_vertical,
RIGHTARG = upg_catalog.line
);
CREATE OPERATOR upg_catalog.?|(
PROCEDURE = upg_catalog.lseg_vertical,
RIGHTARG = upg_catalog.lseg
);
CREATE OPERATOR upg_catalog.?|(
PROCEDURE = upg_catalog.point_vert,
LEFTARG = upg_catalog.point,
RIGHTARG = upg_catalog.point,
COMMUTATOR = ?|
);
CREATE OPERATOR upg_catalog.?||(
PROCEDURE = upg_catalog.line_parallel,
LEFTARG = upg_catalog.line,
RIGHTARG = upg_catalog.line,
COMMUTATOR = ?||
);
CREATE OPERATOR upg_catalog.?||(
PROCEDURE = upg_catalog.lseg_parallel,
LEFTARG = upg_catalog.lseg,
RIGHTARG = upg_catalog.lseg,
COMMUTATOR = ?||
);
-- "@" operators, overloaded three ways:
--   * geometric containment "is contained by" (commutator "~" = contains),
--     with containment selectivity estimators where applicable;
--   * prefix absolute value for numeric types (float4/8, int2/4/8, numeric);
--   * "point/lseg lies on" tests (on_pb, on_pl, on_ps, on_sb, on_sl, ...).
-- "@-@" is prefix length for lseg and path.
CREATE OPERATOR upg_catalog.@(
PROCEDURE = upg_catalog.box_contained,
LEFTARG = upg_catalog.box,
RIGHTARG = upg_catalog.box,
COMMUTATOR = ~,
RESTRICT = upg_catalog.contsel,
JOIN = upg_catalog.contjoinsel
);
CREATE OPERATOR upg_catalog.@(
PROCEDURE = upg_catalog.circle_contained,
LEFTARG = upg_catalog.circle,
RIGHTARG = upg_catalog.circle,
COMMUTATOR = ~,
RESTRICT = upg_catalog.contsel,
JOIN = upg_catalog.contjoinsel
);
CREATE OPERATOR upg_catalog.@(
PROCEDURE = upg_catalog.float4abs,
RIGHTARG = upg_catalog.float4
);
CREATE OPERATOR upg_catalog.@(
PROCEDURE = upg_catalog.float8abs,
RIGHTARG = upg_catalog.float8
);
CREATE OPERATOR upg_catalog.@(
PROCEDURE = upg_catalog.int2abs,
RIGHTARG = upg_catalog.int2
);
CREATE OPERATOR upg_catalog.@(
PROCEDURE = upg_catalog.int4abs,
RIGHTARG = upg_catalog.int4
);
CREATE OPERATOR upg_catalog.@(
PROCEDURE = upg_catalog.int8abs,
RIGHTARG = upg_catalog.int8
);
CREATE OPERATOR upg_catalog.@(
PROCEDURE = upg_catalog.numeric_abs,
RIGHTARG = upg_catalog."numeric"
);
CREATE OPERATOR upg_catalog.@(
PROCEDURE = upg_catalog.on_pb,
LEFTARG = upg_catalog.point,
RIGHTARG = upg_catalog.box
);
CREATE OPERATOR upg_catalog.@(
PROCEDURE = upg_catalog.on_pl,
LEFTARG = upg_catalog.point,
RIGHTARG = upg_catalog.line
);
CREATE OPERATOR upg_catalog.@(
PROCEDURE = upg_catalog.on_ppath,
LEFTARG = upg_catalog.point,
RIGHTARG = upg_catalog.path,
COMMUTATOR = ~
);
CREATE OPERATOR upg_catalog.@(
PROCEDURE = upg_catalog.on_ps,
LEFTARG = upg_catalog.point,
RIGHTARG = upg_catalog.lseg
);
CREATE OPERATOR upg_catalog.@(
PROCEDURE = upg_catalog.on_sb,
LEFTARG = upg_catalog.lseg,
RIGHTARG = upg_catalog.box
);
CREATE OPERATOR upg_catalog.@(
PROCEDURE = upg_catalog.on_sl,
LEFTARG = upg_catalog.lseg,
RIGHTARG = upg_catalog.line
);
CREATE OPERATOR upg_catalog.@(
PROCEDURE = upg_catalog.poly_contained,
LEFTARG = upg_catalog.polygon,
RIGHTARG = upg_catalog.polygon,
COMMUTATOR = ~,
RESTRICT = upg_catalog.contsel,
JOIN = upg_catalog.contjoinsel
);
CREATE OPERATOR upg_catalog.@(
PROCEDURE = upg_catalog.pt_contained_circle,
LEFTARG = upg_catalog.point,
RIGHTARG = upg_catalog.circle,
COMMUTATOR = ~
);
CREATE OPERATOR upg_catalog.@(
PROCEDURE = upg_catalog.pt_contained_poly,
LEFTARG = upg_catalog.point,
RIGHTARG = upg_catalog.polygon,
COMMUTATOR = ~
);
CREATE OPERATOR upg_catalog.@-@(
PROCEDURE = upg_catalog.lseg_length,
RIGHTARG = upg_catalog.lseg
);
CREATE OPERATOR upg_catalog.@-@(
PROCEDURE = upg_catalog.path_length,
RIGHTARG = upg_catalog.path
);
-- "@>" (contains) operators for aclitem arrays, anyarray, and geometric
-- types; commutator is "<@" (is contained by). "@@" is the prefix
-- center-point operator for box/circle/lseg/path/polygon.
CREATE OPERATOR upg_catalog.@>(
PROCEDURE = upg_catalog.aclcontains,
LEFTARG = upg_catalog._aclitem,
RIGHTARG = upg_catalog.aclitem
);
CREATE OPERATOR upg_catalog.@>(
PROCEDURE = upg_catalog.arraycontains,
LEFTARG = upg_catalog.anyarray,
RIGHTARG = upg_catalog.anyarray,
COMMUTATOR = <@,
RESTRICT = upg_catalog.contsel,
JOIN = upg_catalog.contjoinsel
);
CREATE OPERATOR upg_catalog.@>(
PROCEDURE = upg_catalog.box_contain,
LEFTARG = upg_catalog.box,
RIGHTARG = upg_catalog.box,
COMMUTATOR = <@,
RESTRICT = upg_catalog.contsel,
JOIN = upg_catalog.contjoinsel
);
CREATE OPERATOR upg_catalog.@>(
PROCEDURE = upg_catalog.circle_contain,
LEFTARG = upg_catalog.circle,
RIGHTARG = upg_catalog.circle,
COMMUTATOR = <@,
RESTRICT = upg_catalog.contsel,
JOIN = upg_catalog.contjoinsel
);
CREATE OPERATOR upg_catalog.@>(
PROCEDURE = upg_catalog.circle_contain_pt,
LEFTARG = upg_catalog.circle,
RIGHTARG = upg_catalog.point,
COMMUTATOR = <@
);
CREATE OPERATOR upg_catalog.@>(
PROCEDURE = upg_catalog.path_contain_pt,
LEFTARG = upg_catalog.path,
RIGHTARG = upg_catalog.point,
COMMUTATOR = <@
);
CREATE OPERATOR upg_catalog.@>(
PROCEDURE = upg_catalog.poly_contain,
LEFTARG = upg_catalog.polygon,
RIGHTARG = upg_catalog.polygon,
COMMUTATOR = <@,
RESTRICT = upg_catalog.contsel,
JOIN = upg_catalog.contjoinsel
);
CREATE OPERATOR upg_catalog.@>(
PROCEDURE = upg_catalog.poly_contain_pt,
LEFTARG = upg_catalog.polygon,
RIGHTARG = upg_catalog.point,
COMMUTATOR = <@
);
CREATE OPERATOR upg_catalog.@@(
PROCEDURE = upg_catalog.box_center,
RIGHTARG = upg_catalog.box
);
CREATE OPERATOR upg_catalog.@@(
PROCEDURE = upg_catalog.circle_center,
RIGHTARG = upg_catalog.circle
);
CREATE OPERATOR upg_catalog.@@(
PROCEDURE = upg_catalog.lseg_center,
RIGHTARG = upg_catalog.lseg
);
CREATE OPERATOR upg_catalog.@@(
PROCEDURE = upg_catalog.path_center,
RIGHTARG = upg_catalog.path
);
CREATE OPERATOR upg_catalog.@@(
PROCEDURE = upg_catalog.poly_center,
RIGHTARG = upg_catalog.polygon
);
-- "^" exponentiation for float8 and numeric; "|&>" is the geometric
-- "overlaps or is above" operator with position-based estimators.
CREATE OPERATOR upg_catalog.^(
PROCEDURE = upg_catalog.dpow,
LEFTARG = upg_catalog.float8,
RIGHTARG = upg_catalog.float8
);
CREATE OPERATOR upg_catalog.^(
PROCEDURE = upg_catalog.numeric_power,
LEFTARG = upg_catalog."numeric",
RIGHTARG = upg_catalog."numeric"
);
CREATE OPERATOR upg_catalog.|&>(
PROCEDURE = upg_catalog.box_overabove,
LEFTARG = upg_catalog.box,
RIGHTARG = upg_catalog.box,
RESTRICT = upg_catalog.positionsel,
JOIN = upg_catalog.positionjoinsel
);
CREATE OPERATOR upg_catalog.|&>(
PROCEDURE = upg_catalog.circle_overabove,
LEFTARG = upg_catalog.circle,
RIGHTARG = upg_catalog.circle,
RESTRICT = upg_catalog.positionsel,
JOIN = upg_catalog.positionjoinsel
);
CREATE OPERATOR upg_catalog.|&>(
PROCEDURE = upg_catalog.poly_overabove,
LEFTARG = upg_catalog.polygon,
RIGHTARG = upg_catalog.polygon,
RESTRICT = upg_catalog.positionsel,
JOIN = upg_catalog.positionjoinsel
);
-- "|" operators: bitwise OR for bit/inet/int2/int4/int8 (self-commutative
-- where same-typed), plus the legacy prefix "start of tinterval".
-- "|/" is prefix square root; "|>>" is geometric "strictly above".
CREATE OPERATOR upg_catalog.|(
PROCEDURE = upg_catalog.bitor,
LEFTARG = upg_catalog."bit",
RIGHTARG = upg_catalog."bit",
COMMUTATOR = |
);
CREATE OPERATOR upg_catalog.|(
PROCEDURE = upg_catalog.inetor,
LEFTARG = upg_catalog.inet,
RIGHTARG = upg_catalog.inet
);
CREATE OPERATOR upg_catalog.|(
PROCEDURE = upg_catalog.int2or,
LEFTARG = upg_catalog.int2,
RIGHTARG = upg_catalog.int2,
COMMUTATOR = |
);
CREATE OPERATOR upg_catalog.|(
PROCEDURE = upg_catalog.int4or,
LEFTARG = upg_catalog.int4,
RIGHTARG = upg_catalog.int4,
COMMUTATOR = |
);
CREATE OPERATOR upg_catalog.|(
PROCEDURE = upg_catalog.int8or,
LEFTARG = upg_catalog.int8,
RIGHTARG = upg_catalog.int8,
COMMUTATOR = |
);
CREATE OPERATOR upg_catalog.|(
PROCEDURE = upg_catalog.tintervalstart,
RIGHTARG = upg_catalog.tinterval
);
CREATE OPERATOR upg_catalog.|/(
PROCEDURE = upg_catalog.dsqrt,
RIGHTARG = upg_catalog.float8
);
CREATE OPERATOR upg_catalog.|>>(
PROCEDURE = upg_catalog.box_above,
LEFTARG = upg_catalog.box,
RIGHTARG = upg_catalog.box,
RESTRICT = upg_catalog.positionsel,
JOIN = upg_catalog.positionjoinsel
);
CREATE OPERATOR upg_catalog.|>>(
PROCEDURE = upg_catalog.circle_above,
LEFTARG = upg_catalog.circle,
RIGHTARG = upg_catalog.circle,
RESTRICT = upg_catalog.positionsel,
JOIN = upg_catalog.positionjoinsel
);
CREATE OPERATOR upg_catalog.|>>(
PROCEDURE = upg_catalog.poly_above,
LEFTARG = upg_catalog.polygon,
RIGHTARG = upg_catalog.polygon,
RESTRICT = upg_catalog.positionsel,
JOIN = upg_catalog.positionjoinsel
);
-- "||" concatenation operators: array append/cat/prepend, plus bit, bytea
-- and text concatenation. "||/" is prefix cube root for float8.
CREATE OPERATOR upg_catalog.||(
PROCEDURE = upg_catalog.array_append,
LEFTARG = upg_catalog.anyarray,
RIGHTARG = upg_catalog.anyelement
);
CREATE OPERATOR upg_catalog.||(
PROCEDURE = upg_catalog.array_cat,
LEFTARG = upg_catalog.anyarray,
RIGHTARG = upg_catalog.anyarray
);
CREATE OPERATOR upg_catalog.||(
PROCEDURE = upg_catalog.array_prepend,
LEFTARG = upg_catalog.anyelement,
RIGHTARG = upg_catalog.anyarray
);
CREATE OPERATOR upg_catalog.||(
PROCEDURE = upg_catalog.bitcat,
LEFTARG = upg_catalog."bit",
RIGHTARG = upg_catalog."bit"
);
CREATE OPERATOR upg_catalog.||(
PROCEDURE = upg_catalog.byteacat,
LEFTARG = upg_catalog.bytea,
RIGHTARG = upg_catalog.bytea
);
CREATE OPERATOR upg_catalog.||(
PROCEDURE = upg_catalog.textcat,
LEFTARG = upg_catalog.text,
RIGHTARG = upg_catalog.text
);
CREATE OPERATOR upg_catalog.||/(
PROCEDURE = upg_catalog.dcbrt,
RIGHTARG = upg_catalog.float8
);
-- "~" operators, overloaded four ways:
--   * ACL containment (_aclitem ~ aclitem);
--   * prefix bitwise NOT for bit/inet/int2/int4/int8;
--   * geometric "contains" (commutator "@" = is contained by);
--   * case-sensitive POSIX regex match for bpchar/name/text
--     (negator "!~", regex selectivity estimators).
CREATE OPERATOR upg_catalog.~(
PROCEDURE = upg_catalog.aclcontains,
LEFTARG = upg_catalog._aclitem,
RIGHTARG = upg_catalog.aclitem
);
CREATE OPERATOR upg_catalog.~(
PROCEDURE = upg_catalog.bitnot,
RIGHTARG = upg_catalog."bit"
);
CREATE OPERATOR upg_catalog.~(
PROCEDURE = upg_catalog.box_contain,
LEFTARG = upg_catalog.box,
RIGHTARG = upg_catalog.box,
COMMUTATOR = @,
RESTRICT = upg_catalog.contsel,
JOIN = upg_catalog.contjoinsel
);
CREATE OPERATOR upg_catalog.~(
PROCEDURE = upg_catalog.bpcharregexeq,
LEFTARG = upg_catalog.bpchar,
RIGHTARG = upg_catalog.text,
NEGATOR = !~,
RESTRICT = upg_catalog.regexeqsel,
JOIN = upg_catalog.regexeqjoinsel
);
CREATE OPERATOR upg_catalog.~(
PROCEDURE = upg_catalog.circle_contain,
LEFTARG = upg_catalog.circle,
RIGHTARG = upg_catalog.circle,
COMMUTATOR = @,
RESTRICT = upg_catalog.contsel,
JOIN = upg_catalog.contjoinsel
);
CREATE OPERATOR upg_catalog.~(
PROCEDURE = upg_catalog.circle_contain_pt,
LEFTARG = upg_catalog.circle,
RIGHTARG = upg_catalog.point,
COMMUTATOR = @
);
CREATE OPERATOR upg_catalog.~(
PROCEDURE = upg_catalog.inetnot,
RIGHTARG = upg_catalog.inet
);
CREATE OPERATOR upg_catalog.~(
PROCEDURE = upg_catalog.int2not,
RIGHTARG = upg_catalog.int2
);
CREATE OPERATOR upg_catalog.~(
PROCEDURE = upg_catalog.int4not,
RIGHTARG = upg_catalog.int4
);
CREATE OPERATOR upg_catalog.~(
PROCEDURE = upg_catalog.int8not,
RIGHTARG = upg_catalog.int8
);
CREATE OPERATOR upg_catalog.~(
PROCEDURE = upg_catalog.nameregexeq,
LEFTARG = upg_catalog.name,
RIGHTARG = upg_catalog.text,
NEGATOR = !~,
RESTRICT = upg_catalog.regexeqsel,
JOIN = upg_catalog.regexeqjoinsel
);
CREATE OPERATOR upg_catalog.~(
PROCEDURE = upg_catalog.path_contain_pt,
LEFTARG = upg_catalog.path,
RIGHTARG = upg_catalog.point,
COMMUTATOR = @
);
CREATE OPERATOR upg_catalog.~(
PROCEDURE = upg_catalog.poly_contain,
LEFTARG = upg_catalog.polygon,
RIGHTARG = upg_catalog.polygon,
COMMUTATOR = @,
RESTRICT = upg_catalog.contsel,
JOIN = upg_catalog.contjoinsel
);
CREATE OPERATOR upg_catalog.~(
PROCEDURE = upg_catalog.poly_contain_pt,
LEFTARG = upg_catalog.polygon,
RIGHTARG = upg_catalog.point,
COMMUTATOR = @
);
CREATE OPERATOR upg_catalog.~(
PROCEDURE = upg_catalog.textregexeq,
LEFTARG = upg_catalog.text,
RIGHTARG = upg_catalog.text,
NEGATOR = !~,
RESTRICT = upg_catalog.regexeqsel,
JOIN = upg_catalog.regexeqjoinsel
);
-- "~*" case-insensitive POSIX regex match for bpchar/name/text
-- (negator "!~*", case-insensitive regex selectivity estimators).
CREATE OPERATOR upg_catalog.~*(
PROCEDURE = upg_catalog.bpcharicregexeq,
LEFTARG = upg_catalog.bpchar,
RIGHTARG = upg_catalog.text,
NEGATOR = !~*,
RESTRICT = upg_catalog.icregexeqsel,
JOIN = upg_catalog.icregexeqjoinsel
);
CREATE OPERATOR upg_catalog.~*(
PROCEDURE = upg_catalog.nameicregexeq,
LEFTARG = upg_catalog.name,
RIGHTARG = upg_catalog.text,
NEGATOR = !~*,
RESTRICT = upg_catalog.icregexeqsel,
JOIN = upg_catalog.icregexeqjoinsel
);
CREATE OPERATOR upg_catalog.~*(
PROCEDURE = upg_catalog.texticregexeq,
LEFTARG = upg_catalog.text,
RIGHTARG = upg_catalog.text,
NEGATOR = !~*,
RESTRICT = upg_catalog.icregexeqsel,
JOIN = upg_catalog.icregexeqjoinsel
);
-- Pattern-comparison operators used by *_pattern_ops index opclasses
-- (locale-independent, byte-wise ordering):
--   "~<=~" less-or-equal, "~<>~" not-equal, "~<~" less-than,
-- each for bpchar, name, and text.
CREATE OPERATOR upg_catalog.~<=~(
PROCEDURE = upg_catalog.bpchar_pattern_le,
LEFTARG = upg_catalog.bpchar,
RIGHTARG = upg_catalog.bpchar,
COMMUTATOR = ~>=~,
NEGATOR = ~>~,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.~<=~(
PROCEDURE = upg_catalog.name_pattern_le,
LEFTARG = upg_catalog.name,
RIGHTARG = upg_catalog.name,
COMMUTATOR = ~>=~,
NEGATOR = ~>~,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.~<=~(
PROCEDURE = upg_catalog.text_pattern_le,
LEFTARG = upg_catalog.text,
RIGHTARG = upg_catalog.text,
COMMUTATOR = ~>=~,
NEGATOR = ~>~,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.~<>~(
PROCEDURE = upg_catalog.bpchar_pattern_ne,
LEFTARG = upg_catalog.bpchar,
RIGHTARG = upg_catalog.bpchar,
COMMUTATOR = ~<>~,
NEGATOR = ~=~,
RESTRICT = upg_catalog.neqsel,
JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.~<>~(
PROCEDURE = upg_catalog.name_pattern_ne,
LEFTARG = upg_catalog.name,
RIGHTARG = upg_catalog.name,
COMMUTATOR = ~<>~,
NEGATOR = ~=~,
RESTRICT = upg_catalog.neqsel,
JOIN = upg_catalog.neqjoinsel
);
-- NOTE(review): text "~<>~" maps to plain textne rather than a
-- text_pattern_ne procedure; mirrors the source catalog as dumped.
CREATE OPERATOR upg_catalog.~<>~(
PROCEDURE = upg_catalog.textne,
LEFTARG = upg_catalog.text,
RIGHTARG = upg_catalog.text,
COMMUTATOR = ~<>~,
NEGATOR = ~=~,
RESTRICT = upg_catalog.neqsel,
JOIN = upg_catalog.neqjoinsel
);
CREATE OPERATOR upg_catalog.~<~(
PROCEDURE = upg_catalog.bpchar_pattern_lt,
LEFTARG = upg_catalog.bpchar,
RIGHTARG = upg_catalog.bpchar,
COMMUTATOR = ~>~,
NEGATOR = ~>=~,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.~<~(
PROCEDURE = upg_catalog.name_pattern_lt,
LEFTARG = upg_catalog.name,
RIGHTARG = upg_catalog.name,
COMMUTATOR = ~>~,
NEGATOR = ~>=~,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
CREATE OPERATOR upg_catalog.~<~(
PROCEDURE = upg_catalog.text_pattern_lt,
LEFTARG = upg_catalog.text,
RIGHTARG = upg_catalog.text,
COMMUTATOR = ~>~,
NEGATOR = ~>=~,
RESTRICT = upg_catalog.scalarltsel,
JOIN = upg_catalog.scalarltjoinsel
);
-- "~=" (same as) operators for geometric types and tinterval: equality in
-- the geometric sense (e.g. same box), self-commutative, with equality
-- selectivity estimators.
CREATE OPERATOR upg_catalog.~=(
PROCEDURE = upg_catalog.box_same,
LEFTARG = upg_catalog.box,
RIGHTARG = upg_catalog.box,
COMMUTATOR = ~=,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel
);
CREATE OPERATOR upg_catalog.~=(
PROCEDURE = upg_catalog.circle_same,
LEFTARG = upg_catalog.circle,
RIGHTARG = upg_catalog.circle,
COMMUTATOR = ~=,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel
);
-- point "~=" is the only variant with a negator ("<>").
CREATE OPERATOR upg_catalog.~=(
PROCEDURE = upg_catalog.point_eq,
LEFTARG = upg_catalog.point,
RIGHTARG = upg_catalog.point,
COMMUTATOR = ~=,
NEGATOR = <>,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel
);
CREATE OPERATOR upg_catalog.~=(
PROCEDURE = upg_catalog.poly_same,
LEFTARG = upg_catalog.polygon,
RIGHTARG = upg_catalog.polygon,
COMMUTATOR = ~=,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel
);
CREATE OPERATOR upg_catalog.~=(
PROCEDURE = upg_catalog.tintervalsame,
LEFTARG = upg_catalog.tinterval,
RIGHTARG = upg_catalog.tinterval,
COMMUTATOR = ~=,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel
);
-- "~=~" pattern-equality operators for bpchar/name/text. These are
-- hash-joinable (HASHES) and carry legacy merge-join attributes
-- (SORT1/SORT2/LTCMP/GTCMP referencing the "~<~"/"~>~" pattern operators),
-- which dates this dump to a pre-8.3-style catalog format.
CREATE OPERATOR upg_catalog.~=~(
PROCEDURE = upg_catalog.bpchar_pattern_eq,
LEFTARG = upg_catalog.bpchar,
RIGHTARG = upg_catalog.bpchar,
COMMUTATOR = ~=~,
NEGATOR = ~<>~,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES,
SORT1 = ~<~,
SORT2 = ~<~,
LTCMP = ~<~,
GTCMP = ~>~
);
CREATE OPERATOR upg_catalog.~=~(
PROCEDURE = upg_catalog.name_pattern_eq,
LEFTARG = upg_catalog.name,
RIGHTARG = upg_catalog.name,
COMMUTATOR = ~=~,
NEGATOR = ~<>~,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES,
SORT1 = ~<~,
SORT2 = ~<~,
LTCMP = ~<~,
GTCMP = ~>~
);
-- NOTE(review): text "~=~" maps to plain texteq rather than a
-- text_pattern_eq procedure; mirrors the source catalog as dumped.
CREATE OPERATOR upg_catalog.~=~(
PROCEDURE = upg_catalog.texteq,
LEFTARG = upg_catalog.text,
RIGHTARG = upg_catalog.text,
COMMUTATOR = ~=~,
NEGATOR = ~<>~,
RESTRICT = upg_catalog.eqsel,
JOIN = upg_catalog.eqjoinsel,
HASHES,
SORT1 = ~<~,
SORT2 = ~<~,
LTCMP = ~<~,
GTCMP = ~>~
);
-- Pattern greater-or-equal ("~>=~") and greater-than ("~>~") operators for
-- bpchar/name/text, completing the *_pattern_ops comparison family.
CREATE OPERATOR upg_catalog.~>=~(
PROCEDURE = upg_catalog.bpchar_pattern_ge,
LEFTARG = upg_catalog.bpchar,
RIGHTARG = upg_catalog.bpchar,
COMMUTATOR = ~<=~,
NEGATOR = ~<~,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.~>=~(
PROCEDURE = upg_catalog.name_pattern_ge,
LEFTARG = upg_catalog.name,
RIGHTARG = upg_catalog.name,
COMMUTATOR = ~<=~,
NEGATOR = ~<~,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.~>=~(
PROCEDURE = upg_catalog.text_pattern_ge,
LEFTARG = upg_catalog.text,
RIGHTARG = upg_catalog.text,
COMMUTATOR = ~<=~,
NEGATOR = ~<~,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.~>~(
PROCEDURE = upg_catalog.bpchar_pattern_gt,
LEFTARG = upg_catalog.bpchar,
RIGHTARG = upg_catalog.bpchar,
COMMUTATOR = ~<~,
NEGATOR = ~<=~,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.~>~(
PROCEDURE = upg_catalog.name_pattern_gt,
LEFTARG = upg_catalog.name,
RIGHTARG = upg_catalog.name,
COMMUTATOR = ~<~,
NEGATOR = ~<=~,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
CREATE OPERATOR upg_catalog.~>~(
PROCEDURE = upg_catalog.text_pattern_gt,
LEFTARG = upg_catalog.text,
RIGHTARG = upg_catalog.text,
COMMUTATOR = ~<~,
NEGATOR = ~<=~,
RESTRICT = upg_catalog.scalargtsel,
JOIN = upg_catalog.scalargtjoinsel
);
-- "~~" (LIKE) operators for bpchar/bytea/name/text with negator "!~~"
-- and LIKE selectivity estimators; "~~*" (ILIKE, case-insensitive) for
-- bpchar/name/text with negator "!~~*".
CREATE OPERATOR upg_catalog.~~(
PROCEDURE = upg_catalog.bpcharlike,
LEFTARG = upg_catalog.bpchar,
RIGHTARG = upg_catalog.text,
NEGATOR = !~~,
RESTRICT = upg_catalog.likesel,
JOIN = upg_catalog.likejoinsel
);
CREATE OPERATOR upg_catalog.~~(
PROCEDURE = upg_catalog.bytealike,
LEFTARG = upg_catalog.bytea,
RIGHTARG = upg_catalog.bytea,
NEGATOR = !~~,
RESTRICT = upg_catalog.likesel,
JOIN = upg_catalog.likejoinsel
);
CREATE OPERATOR upg_catalog.~~(
PROCEDURE = upg_catalog.namelike,
LEFTARG = upg_catalog.name,
RIGHTARG = upg_catalog.text,
NEGATOR = !~~,
RESTRICT = upg_catalog.likesel,
JOIN = upg_catalog.likejoinsel
);
CREATE OPERATOR upg_catalog.~~(
PROCEDURE = upg_catalog.textlike,
LEFTARG = upg_catalog.text,
RIGHTARG = upg_catalog.text,
NEGATOR = !~~,
RESTRICT = upg_catalog.likesel,
JOIN = upg_catalog.likejoinsel
);
CREATE OPERATOR upg_catalog.~~*(
PROCEDURE = upg_catalog.bpchariclike,
LEFTARG = upg_catalog.bpchar,
RIGHTARG = upg_catalog.text,
NEGATOR = !~~*,
RESTRICT = upg_catalog.iclikesel,
JOIN = upg_catalog.iclikejoinsel
);
CREATE OPERATOR upg_catalog.~~*(
PROCEDURE = upg_catalog.nameiclike,
LEFTARG = upg_catalog.name,
RIGHTARG = upg_catalog.text,
NEGATOR = !~~*,
RESTRICT = upg_catalog.iclikesel,
JOIN = upg_catalog.iclikejoinsel
);
CREATE OPERATOR upg_catalog.~~*(
PROCEDURE = upg_catalog.texticlike,
LEFTARG = upg_catalog.text,
RIGHTARG = upg_catalog.text,
NEGATOR = !~~*,
RESTRICT = upg_catalog.iclikesel,
JOIN = upg_catalog.iclikejoinsel
);
-- This file and its contents are licensed under the Timescale License.
-- Please see the included NOTICE for copyright information and
-- LICENSE-TIMESCALE for a copy of the license.
-- Regression-test setup: connect as the cluster superuser, silently load
-- test helper scripts, and define psql variables for three data-node
-- database names (TEST_DBNAME with _1/_2/_3 suffixes) and two tablespaces.
\c :TEST_DBNAME :ROLE_CLUSTER_SUPERUSER;
\unset ECHO
\o /dev/null
\ir include/filter_exec.sql
\ir include/remote_exec.sql
\o
\set ECHO all
\set DATA_NODE_1 :TEST_DBNAME _1
\set DATA_NODE_2 :TEST_DBNAME _2
\set DATA_NODE_3 :TEST_DBNAME _3
\set TABLESPACE_1 :TEST_DBNAME _1
\set TABLESPACE_2 :TEST_DBNAME _2
-- Capture the two tablespace paths into psql variables via \gset.
SELECT
test.make_tablespace_path(:'TEST_TABLESPACE1_PREFIX', :'TEST_DBNAME') AS spc1path,
test.make_tablespace_path(:'TEST_TABLESPACE2_PREFIX', :'TEST_DBNAME') AS spc2path
\gset
-- Register the three data nodes and open their foreign servers to all roles.
SELECT (add_data_node (name, host => 'localhost', DATABASE => name)).*
FROM (VALUES (:'DATA_NODE_1'), (:'DATA_NODE_2'), (:'DATA_NODE_3')) v (name);
GRANT USAGE ON FOREIGN SERVER :DATA_NODE_1, :DATA_NODE_2, :DATA_NODE_3 TO PUBLIC;
-- Import testsupport.sql file to data nodes
\unset ECHO
\o /dev/null
\c :DATA_NODE_1
SET client_min_messages TO ERROR;
\ir :TEST_SUPPORT_FILE
\c :DATA_NODE_2
SET client_min_messages TO ERROR;
\ir :TEST_SUPPORT_FILE
\c :DATA_NODE_3
SET client_min_messages TO ERROR;
\ir :TEST_SUPPORT_FILE
-- Reconnect to the access node and restore normal message verbosity.
\c :TEST_DBNAME :ROLE_CLUSTER_SUPERUSER;
\o
SET client_min_messages TO NOTICE;
\set ECHO all
-- Table that will become a distributed hypertable for the trigger tests.
CREATE TABLE hyper (
  time BIGINT NOT NULL,
  device_id TEXT NOT NULL,
  sensor_1 NUMERIC NULL DEFAULT 1
);
-- Table to log trigger events
CREATE TABLE trigger_events (
   tg_when text,
   tg_level text,
   tg_op text,
   tg_name text
);
-- Trigger function: records when/level/op/name of each firing into
-- trigger_events, then passes the row through unchanged.
CREATE OR REPLACE FUNCTION test_trigger()
RETURNS TRIGGER LANGUAGE PLPGSQL AS
$BODY$
BEGIN
    INSERT INTO public.trigger_events VALUES (TG_WHEN, TG_LEVEL, TG_OP, TG_NAME);
    RETURN NEW;
END
$BODY$;
-- Create trigger_events on data node 1 only; the missing table on the
-- other nodes is exercised by a failure case further down.
CALL distributed_exec($$
   CREATE TABLE trigger_events (
      tg_when text,
      tg_level text,
      tg_op text,
      tg_name text
   );
$$, ARRAY[:'DATA_NODE_1']);
-- row triggers: BEFORE
CREATE TRIGGER _0_test_trigger_insert
BEFORE INSERT ON hyper
FOR EACH ROW EXECUTE FUNCTION test_trigger();
CREATE TRIGGER _0_test_trigger_update
BEFORE UPDATE ON hyper
FOR EACH ROW EXECUTE FUNCTION test_trigger();
CREATE TRIGGER _0_test_trigger_delete
BEFORE DELETE ON hyper
FOR EACH ROW EXECUTE FUNCTION test_trigger();
CREATE TRIGGER z_test_trigger_all
BEFORE INSERT OR UPDATE OR DELETE ON hyper
FOR EACH ROW EXECUTE FUNCTION test_trigger();
-- row triggers: AFTER
CREATE TRIGGER _0_test_trigger_insert_after
AFTER INSERT ON hyper
FOR EACH ROW EXECUTE FUNCTION test_trigger();
-- Conditional (WHEN clause) trigger: fires only for device 'dev1'.
CREATE TRIGGER _0_test_trigger_insert_after_when_dev1
AFTER INSERT ON hyper
FOR EACH ROW
WHEN (NEW.device_id = 'dev1')
EXECUTE FUNCTION test_trigger();
CREATE TRIGGER _0_test_trigger_update_after
AFTER UPDATE ON hyper
FOR EACH ROW EXECUTE FUNCTION test_trigger();
CREATE TRIGGER _0_test_trigger_delete_after
AFTER DELETE ON hyper
FOR EACH ROW EXECUTE FUNCTION test_trigger();
CREATE TRIGGER z_test_trigger_all_after
AFTER INSERT OR UPDATE OR DELETE ON hyper
FOR EACH ROW EXECUTE FUNCTION test_trigger();
--- Create some triggers before we turn the table into a distributed
--- hypertable and some triggers after so that we test both cases.
SELECT * FROM create_distributed_hypertable('hyper', 'time', 'device_id', 3, chunk_time_interval => 10, data_nodes => ARRAY[:'DATA_NODE_1', :'DATA_NODE_2']);
-- FAILURE cases
\set ON_ERROR_STOP 0
-- Check that CREATE TRIGGER fails if a trigger already exists on a data node.
CALL distributed_exec($$
   CREATE TRIGGER _0_test_trigger_insert_s_before
   BEFORE INSERT ON hyper
   FOR EACH STATEMENT EXECUTE FUNCTION test_trigger();
$$, ARRAY[:'DATA_NODE_1']);
CREATE TRIGGER _0_test_trigger_insert_s_before
BEFORE INSERT ON hyper
FOR EACH STATEMENT EXECUTE FUNCTION test_trigger();
-- Remove the colliding trigger again so later trigger creation succeeds.
CALL distributed_exec($$
   DROP TRIGGER _0_test_trigger_insert_s_before ON hyper;
$$, ARRAY[:'DATA_NODE_1']);
-- Test that trigger execution fails if trigger_events table doesn't
-- exist on all nodes. Insert should fail
INSERT INTO hyper(time, device_id,sensor_1) VALUES
(1257987600000000000, 'dev1', 1);
\set ON_ERROR_STOP 1
-- Now, create trigger_events on the other nodes
CALL distributed_exec($$
   CREATE TABLE trigger_events (
      tg_when text,
      tg_level text,
      tg_op text,
      tg_name text
   );
$$, ARRAY[:'DATA_NODE_2', :'DATA_NODE_3']);
-- Test that trigger fails if the user isn't the owner of the trigger
-- function on one of the nodes.
RESET ROLE;
CALL distributed_exec($$
   ALTER FUNCTION test_trigger OWNER TO current_user;
$$, ARRAY[:'DATA_NODE_1']);
SET ROLE :ROLE_1;
\set ON_ERROR_STOP 0
-- Insert should fail since the trigger function on DN1 isn't owned by
-- the user.
INSERT INTO hyper(time, device_id,sensor_1) VALUES
(1257987600000000000, 'dev1', 1);
\set ON_ERROR_STOP 1
-- Reset the owner of the trigger function on DN1 to the non-superuser
RESET ROLE;
CALL distributed_exec('ALTER FUNCTION test_trigger OWNER TO ' || :'ROLE_1', ARRAY[:'DATA_NODE_1']);
SET ROLE :ROLE_1;
-- Add more triggers after the distributed hypertable is created
-- statement triggers: BEFORE
CREATE TRIGGER _0_test_trigger_insert_s_before
BEFORE INSERT ON hyper
FOR EACH STATEMENT EXECUTE FUNCTION test_trigger();
CREATE TRIGGER _0_test_trigger_update_s_before
BEFORE UPDATE ON hyper
FOR EACH STATEMENT EXECUTE FUNCTION test_trigger();
CREATE TRIGGER _0_test_trigger_delete_s_before
BEFORE DELETE ON hyper
FOR EACH STATEMENT EXECUTE FUNCTION test_trigger();
-- statement triggers: AFTER
CREATE TRIGGER _0_test_trigger_insert_s_after
AFTER INSERT ON hyper
FOR EACH STATEMENT EXECUTE FUNCTION test_trigger();
CREATE TRIGGER _0_test_trigger_update_s_after
AFTER UPDATE ON hyper
FOR EACH STATEMENT EXECUTE FUNCTION test_trigger();
CREATE TRIGGER _0_test_trigger_delete_s_after
AFTER DELETE ON hyper
FOR EACH STATEMENT EXECUTE FUNCTION test_trigger();
--test triggers before create_distributed_hypertable
-- First successful insert: all nodes now have trigger_events and the
-- trigger function has the right owner everywhere.
INSERT INTO hyper(time, device_id,sensor_1) VALUES
(1257987600000000000, 'dev1', 1);
-- Show trigger count on access node. Only statement-level triggers
-- fire on the access node.
SELECT tg_when, tg_level, tg_op, tg_name, count(*)
FROM trigger_events
GROUP BY 1,2,3,4
ORDER BY 1,2,3,4;
-- Show trigger counts on data nodes. Both statement-level and
-- row-level triggers fire on the data nodes.
SELECT * FROM test.remote_exec(ARRAY[:'DATA_NODE_1', :'DATA_NODE_2'], $$
SELECT tg_when, tg_level, tg_op, tg_name, count(*)
FROM trigger_events
GROUP BY 1,2,3,4
ORDER BY 1,2,3,4;
$$);
-- Reset the event log everywhere before exercising UPDATE/DELETE.
TRUNCATE trigger_events;
CALL distributed_exec($$
TRUNCATE trigger_events;
$$);
INSERT INTO hyper(time, device_id,sensor_1) VALUES
(1257987700000000000, 'dev2', 1), (1257987800000000000, 'dev2', 1);
UPDATE hyper SET sensor_1 = 2;
DELETE FROM hyper;
SELECT tg_when, tg_level, tg_op, tg_name, count(*)
FROM trigger_events
GROUP BY 1,2,3,4
ORDER BY 1,2,3,4;
SELECT * FROM test.remote_exec(ARRAY[:'DATA_NODE_1', :'DATA_NODE_2'], $$
SELECT tg_when, tg_level, tg_op, tg_name, count(*)
FROM trigger_events
GROUP BY 1,2,3,4
ORDER BY 1,2,3,4;
$$);
-- Attach a new data node and show that the hypertable is created on
-- the node, including its triggers.
SELECT attach_data_node(:'DATA_NODE_3', 'hyper');
-- Show that triggers are created on the new data node after attaching
SELECT * FROM test.remote_exec(ARRAY[:'DATA_NODE_3'],
$$
SELECT test.show_triggers('hyper');
$$);
-- Insert data on the new data node to create a chunk and fire
-- triggers.
INSERT INTO hyper(time, device_id,sensor_1) VALUES
(1257987700000000000, 'dev4', 1);
SELECT * FROM test.remote_exec(ARRAY[:'DATA_NODE_3'], $$
SELECT tg_when, tg_level, tg_op, tg_name, count(*)
FROM trigger_events
GROUP BY 1,2,3,4
ORDER BY 1,2,3,4;
$$);
--test drop trigger
-- Drop every trigger created above. DROP TRIGGER on the access node is
-- expected to propagate to the data nodes; verified by the remote_exec
-- query at the end of this section.
DROP TRIGGER _0_test_trigger_insert ON hyper;
DROP TRIGGER _0_test_trigger_insert_s_before ON hyper;
DROP TRIGGER _0_test_trigger_insert_after ON hyper;
DROP TRIGGER _0_test_trigger_insert_s_after ON hyper;
DROP TRIGGER _0_test_trigger_update ON hyper;
DROP TRIGGER _0_test_trigger_update_s_before ON hyper;
DROP TRIGGER _0_test_trigger_update_after ON hyper;
DROP TRIGGER _0_test_trigger_update_s_after ON hyper;
DROP TRIGGER _0_test_trigger_delete ON hyper;
DROP TRIGGER _0_test_trigger_delete_s_before ON hyper;
DROP TRIGGER _0_test_trigger_delete_after ON hyper;
DROP TRIGGER _0_test_trigger_delete_s_after ON hyper;
DROP TRIGGER z_test_trigger_all ON hyper;
DROP TRIGGER z_test_trigger_all_after ON hyper;
DROP TRIGGER _0_test_trigger_insert_after_when_dev1 ON hyper;
-- Triggers are dropped on all data nodes:
SELECT * FROM test.remote_exec(ARRAY[:'DATA_NODE_1', :'DATA_NODE_2', :'DATA_NODE_3'], $$
SELECT st."Child" as chunk_relid, test.show_triggers((st)."Child")
FROM test.show_subtables('hyper') st;
$$);
-- Test triggers that modify tuples and make sure RETURNING is done
-- properly (i.e., the modified tuple is returned).
-- Add serial (autoincrement) and DEFAULT value columns to test that
-- these work with custom insert nodes.
CREATE TABLE disttable(
    id serial,
    time timestamptz NOT NULL,
    device int DEFAULT 100,
    temp_c float
);
SELECT * FROM create_distributed_hypertable('disttable', 'time', 'device');
-- Create a datatable to source data from. Add array of composite data
-- type to test switching to text mode below. Arrays include the type
-- Oid when serialized in binary format. Since the Oid of a
-- user-created type can differ across data nodes, such serialization
-- is not safe.
CREATE TABLE datatable (LIKE disttable);
INSERT INTO datatable (id, time, device, temp_c) VALUES
       (1, '2017-01-01 06:01', 1, 1),
       (2, '2017-01-01 09:11', 3, 2),
       (3, '2017-01-01 08:01', 1, 3),
       (4, '2017-01-02 08:01', 2, 4),
       (5, '2018-07-02 08:01', 87, 5),
       (6, '2018-07-01 06:01', 13, 6),
       (7, '2018-07-01 09:11', 90, 7),
       (8, '2018-07-01 08:01', 29, 8);
-- Trigger function: bumps temp_c by 1.0 on INSERT only; passes the
-- (possibly modified) row through.
CREATE OR REPLACE FUNCTION temp_increment_trigger()
RETURNS TRIGGER LANGUAGE PLPGSQL AS
$BODY$
BEGIN
    IF TG_OP = 'INSERT' THEN
       NEW.temp_c = NEW.temp_c+1.0;
    END IF;
    RETURN NEW;
END
$BODY$;
-- Add a BEFORE INSERT trigger to see that plan reverts to
-- DataNodeDispatch when using RETURNING
CREATE TRIGGER _0_temp_increment
BEFORE INSERT ON disttable
FOR EACH ROW EXECUTE FUNCTION temp_increment_trigger();
-- Show that the trigger exists on a data node
SELECT test.remote_exec(ARRAY[:'DATA_NODE_3'], $$ SELECT test.show_triggers('disttable') $$);
-- Empty the hypertable so the EXPLAIN/INSERT runs below start clean.
TRUNCATE disttable;
-- Show EXPLAINs for INSERT first with DataNodeCopy disabled. Should
-- always use DataNodeDispatch
SET timescaledb.enable_distributed_insert_with_copy=false;
-- Without RETURNING
EXPLAIN VERBOSE
INSERT INTO disttable (time, device, temp_c)
SELECT time, device, temp_c FROM datatable;
-- With RETURNING
EXPLAIN VERBOSE
INSERT INTO disttable (time, device, temp_c)
SELECT time, device, temp_c FROM datatable RETURNING *;
-- With DataNodeCopy enabled, should use DataNodeCopy when there's no
-- RETURNING clause, but with RETURNING it should use DataNodeDispatch
-- due to the modifying trigger.
SET timescaledb.enable_distributed_insert_with_copy=true;
-- Without RETURNING
EXPLAIN VERBOSE
INSERT INTO disttable (time, device, temp_c)
SELECT time, device, temp_c FROM datatable;
-- With RETURNING
EXPLAIN VERBOSE
INSERT INTO disttable (time, device, temp_c)
SELECT time, device, temp_c FROM datatable RETURNING *;
-- Do the actual INSERT, but wrap in CTE to ensure ordered output in
-- order to avoid flakiness. The returned rows should have temp_c
-- incremented by the trigger
WITH inserted AS (
     INSERT INTO disttable (time, device, temp_c)
     SELECT time, device, temp_c FROM datatable RETURNING *
) SELECT * FROM inserted ORDER BY 1;
-- Show that the RETURNING rows are the same as those stored after
-- INSERT. Expect temp_c to be incremented by one compared to the
-- original data.
SELECT di.id, di.time, di.device, di.temp_c AS temp_c, da.temp_c AS temp_c_orig
FROM disttable di, datatable da
WHERE di.id = da.id
ORDER BY 1;
-- Rename a trigger (should propagate to the data nodes, verified below)
ALTER TRIGGER _0_temp_increment ON disttable RENAME TO _1_temp_increment;
-- Show that remote chunks have the new trigger name
SELECT * FROM test.remote_exec(NULL, $$
SELECT st."Child" as chunk_relid, test.show_triggers((st)."Child")
FROM test.show_subtables('disttable') st;
$$);
-- Drop the trigger and show that it is dropped on data nodes
DROP TRIGGER _1_temp_increment ON disttable;
SELECT * FROM test.remote_exec(NULL, $$
SELECT st."Child" as chunk_relid, test.show_triggers((st)."Child")
FROM test.show_subtables('disttable') st;
$$);
SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;
-- ----------------------------
-- Table structure for cms_apicustom
-- ----------------------------
-- CRUDBooster "API generator" endpoint definitions: each row describes a
-- generated API endpoint (target table, action, selected columns,
-- where/order clauses, HTTP method). `parameters`/`responses` appear to
-- hold serialized request/response specs -- TODO confirm format against
-- the generator controller.
DROP TABLE IF EXISTS `cms_apicustom`;
CREATE TABLE `cms_apicustom` (
  `id` int UNSIGNED NOT NULL AUTO_INCREMENT,
  `permalink` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `tabel` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `aksi` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `kolom` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `orderby` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `sub_query_1` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `sql_where` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `nama` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `keterangan` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `parameter` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `created_at` timestamp NULL DEFAULT NULL,
  `updated_at` timestamp NULL DEFAULT NULL,
  `method_type` varchar(25) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `parameters` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL,
  `responses` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL,
  PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 1 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of cms_apicustom
-- ----------------------------
-- (no seed rows)
-- ----------------------------
-- Table structure for cms_apikey
-- ----------------------------
-- API keys for the generated endpoints: secret key, usage counter
-- (`hit`) and an active/inactive `status` (defaults to 'active').
DROP TABLE IF EXISTS `cms_apikey`;
CREATE TABLE `cms_apikey` (
  `id` int UNSIGNED NOT NULL AUTO_INCREMENT,
  `screetkey` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `hit` int NULL DEFAULT NULL,
  `status` varchar(25) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT 'active',
  `created_at` timestamp NULL DEFAULT NULL,
  `updated_at` timestamp NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 1 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of cms_apikey
-- ----------------------------
-- (no seed rows)
-- ----------------------------
-- Table structure for cms_dashboard
-- ----------------------------
-- Dashboard definitions, one per privilege group
-- (`id_cms_privileges`); `content` stores the dashboard layout --
-- presumably serialized, verify against the dashboard controller.
DROP TABLE IF EXISTS `cms_dashboard`;
CREATE TABLE `cms_dashboard` (
  `id` int UNSIGNED NOT NULL AUTO_INCREMENT,
  `name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `id_cms_privileges` int NULL DEFAULT NULL,
  `content` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL,
  `created_at` timestamp NULL DEFAULT NULL,
  `updated_at` timestamp NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 1 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of cms_dashboard
-- ----------------------------
-- (no seed rows)
-- ----------------------------
-- Table structure for cms_email_queues
-- ----------------------------
-- Outgoing email queue: scheduled send time (`send_at`), envelope
-- fields, body/attachments, and an `is_sent` delivery flag.
DROP TABLE IF EXISTS `cms_email_queues`;
CREATE TABLE `cms_email_queues` (
  `id` int UNSIGNED NOT NULL AUTO_INCREMENT,
  `send_at` datetime NULL DEFAULT NULL,
  `email_recipient` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `email_from_email` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `email_from_name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `email_cc_email` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `email_subject` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `email_content` text CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL,
  `email_attachments` text CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL,
  `is_sent` tinyint(1) NULL DEFAULT NULL,
  `created_at` timestamp NULL DEFAULT NULL,
  `updated_at` timestamp NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 1 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of cms_email_queues
-- ----------------------------
-- (no seed rows)
-- ----------------------------
-- Table structure for cms_email_templates
-- ----------------------------
-- Reusable email templates looked up by `slug`; placeholders such as
-- [password] in `content` are substituted at send time.
DROP TABLE IF EXISTS `cms_email_templates`;
CREATE TABLE `cms_email_templates` (
  `id` int UNSIGNED NOT NULL AUTO_INCREMENT,
  `name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `slug` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `subject` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `content` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL,
  `description` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `from_name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `from_email` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `cc_email` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `created_at` timestamp NULL DEFAULT NULL,
  `updated_at` timestamp NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of cms_email_templates
-- ----------------------------
-- Explicit column list so the seed keeps working if columns are ever
-- added or reordered.
INSERT INTO `cms_email_templates`
  (`id`, `name`, `slug`, `subject`, `content`, `description`, `from_name`, `from_email`, `cc_email`, `created_at`, `updated_at`)
VALUES
  (1, 'Email Template Forgot Password Backend', 'forgot_password_backend', NULL, '<p>Hi,</p><p>Someone requested forgot password, here is your new password : </p><p>[password]</p><p><br></p><p>--</p><p>Regards,</p><p>Admin</p>', '[password]', 'System', 'system@crudbooster.com', NULL, '2022-04-15 06:06:21', NULL);
-- ----------------------------
-- Table structure for cms_logs
-- ----------------------------
-- Audit log of admin actions: client IP/user agent, the URL hit, a
-- human-readable description, and the acting user (`id_cms_users`).
DROP TABLE IF EXISTS `cms_logs`;
CREATE TABLE `cms_logs` (
  `id` int UNSIGNED NOT NULL AUTO_INCREMENT,
  `ipaddress` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `useragent` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `url` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `description` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `details` text CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL,
  `id_cms_users` int NULL DEFAULT NULL,
  `created_at` timestamp NULL DEFAULT NULL,
  `updated_at` timestamp NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 6 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of cms_logs
-- ----------------------------
-- Explicit column list + single multi-row INSERT: robust against schema
-- changes and cheaper than five separate statements.
INSERT INTO `cms_logs`
  (`id`, `ipaddress`, `useragent`, `url`, `description`, `details`, `id_cms_users`, `created_at`, `updated_at`)
VALUES
  (1, '127.0.0.1', 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:99.0) Gecko/20100101 Firefox/99.0', 'http://localhost/admin/login', 'admin@crudbooster.com login with IP Address 127.0.0.1', '', 1, '2022-04-15 06:07:12', NULL),
  (2, '127.0.0.1', 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:99.0) Gecko/20100101 Firefox/99.0', 'http://localhost/admin/device/add-save', 'Add New Data nomer m3 at Device', '', 1, '2022-04-15 06:13:42', NULL),
  (3, '127.0.0.1', 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:99.0) Gecko/20100101 Firefox/99.0', 'http://laravel-whatsapp-server.test/admin/login', 'admin@crudbooster.com login with IP Address 127.0.0.1', '', 1, '2022-04-15 06:17:05', NULL),
  (4, '127.0.0.1', 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:99.0) Gecko/20100101 Firefox/99.0', 'http://laravel-whatsapp-server.test/admin/login', 'admin@crudbooster.com login with IP Address 127.0.0.1', '', 1, '2022-04-17 18:04:18', NULL),
  (5, '127.0.0.1', 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/102.0.0.0 Safari/537.36', 'http://laravel-whatsapp-server.test/admin/login', 'admin@crudbooster.com login with IP Address 127.0.0.1', '', 1, '2022-04-17 18:07:57', NULL);
-- ----------------------------
-- Table structure for cms_menus
-- ----------------------------
-- Sidebar menu entries: route/url target, icon, tree position
-- (`parent_id`, `sorting`) and owning privilege group.
DROP TABLE IF EXISTS `cms_menus`;
CREATE TABLE `cms_menus` (
  `id` int UNSIGNED NOT NULL AUTO_INCREMENT,
  `name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `type` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT 'url',
  `path` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `color` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `icon` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `parent_id` int NULL DEFAULT NULL,
  `is_active` tinyint(1) NOT NULL DEFAULT 1,
  `is_dashboard` tinyint(1) NOT NULL DEFAULT 0,
  `id_cms_privileges` int NULL DEFAULT NULL,
  `sorting` int NULL DEFAULT NULL,
  `created_at` timestamp NULL DEFAULT NULL,
  `updated_at` timestamp NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 4 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of cms_menus
-- ----------------------------
-- Explicit column list + multi-row INSERT (was three bare INSERTs).
INSERT INTO `cms_menus`
  (`id`, `name`, `type`, `path`, `color`, `icon`, `parent_id`, `is_active`, `is_dashboard`, `id_cms_privileges`, `sorting`, `created_at`, `updated_at`)
VALUES
  (1, 'Device', 'Route', 'AdminDeviceControllerGetIndex', NULL, 'fa fa-qrcode', 0, 1, 0, 1, 1, '2022-02-06 02:49:42', NULL),
  (2, 'Contact', 'Route', 'AdminContactControllerGetIndex', NULL, 'fa fa-book', 0, 0, 0, 1, 1, '2022-02-06 02:53:46', NULL),
  (3, 'Outbox', 'Route', 'AdminOutboxControllerGetIndex', NULL, 'fa fa-send', 0, 1, 0, 1, 3, '2022-02-06 02:59:55', NULL);
-- ----------------------------
-- Table structure for cms_menus_privileges
-- ----------------------------
-- Junction table mapping menus to privilege groups (many-to-many).
DROP TABLE IF EXISTS `cms_menus_privileges`;
CREATE TABLE `cms_menus_privileges` (
  `id` int UNSIGNED NOT NULL AUTO_INCREMENT,
  `id_cms_menus` int NULL DEFAULT NULL,
  `id_cms_privileges` int NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 4 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of cms_menus_privileges
-- ----------------------------
-- Explicit column list + multi-row INSERT (was three bare INSERTs).
INSERT INTO `cms_menus_privileges`
  (`id`, `id_cms_menus`, `id_cms_privileges`)
VALUES
  (1, 1, 1),
  (2, 2, 1),
  (3, 3, 1);
-- ----------------------------
-- Table structure for cms_moduls
-- ----------------------------
-- CRUD module registry: maps an admin URL `path` to a database
-- `table_name` and its `controller` class. `is_protected` marks the
-- built-in system modules.
DROP TABLE IF EXISTS `cms_moduls`;
CREATE TABLE `cms_moduls` (
  `id` int UNSIGNED NOT NULL AUTO_INCREMENT,
  `name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `icon` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `path` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `table_name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `controller` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `is_protected` tinyint(1) NOT NULL DEFAULT 0,
  `is_active` tinyint(1) NOT NULL DEFAULT 0,
  `created_at` timestamp NULL DEFAULT NULL,
  `updated_at` timestamp NULL DEFAULT NULL,
  `deleted_at` timestamp NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 15 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of cms_moduls
-- ----------------------------
-- Explicit column list + single multi-row INSERT (was 14 bare INSERTs).
INSERT INTO `cms_moduls`
  (`id`, `name`, `icon`, `path`, `table_name`, `controller`, `is_protected`, `is_active`, `created_at`, `updated_at`, `deleted_at`)
VALUES
  (1, 'Notifications', 'fa fa-cog', 'notifications', 'cms_notifications', 'NotificationsController', 1, 1, '2022-02-04 09:46:11', NULL, NULL),
  (2, 'Privileges', 'fa fa-cog', 'privileges', 'cms_privileges', 'PrivilegesController', 1, 1, '2022-02-04 09:46:11', NULL, NULL),
  (3, 'Privileges Roles', 'fa fa-cog', 'privileges_roles', 'cms_privileges_roles', 'PrivilegesRolesController', 1, 1, '2022-02-04 09:46:11', NULL, NULL),
  (4, 'Users Management', 'fa fa-users', 'users', 'cms_users', 'AdminCmsUsersController', 0, 1, '2022-02-04 09:46:11', NULL, NULL),
  (5, 'Settings', 'fa fa-cog', 'settings', 'cms_settings', 'SettingsController', 1, 1, '2022-02-04 09:46:11', NULL, NULL),
  (6, 'Module Generator', 'fa fa-database', 'module_generator', 'cms_moduls', 'ModulsController', 1, 1, '2022-02-04 09:46:11', NULL, NULL),
  (7, 'Menu Management', 'fa fa-bars', 'menu_management', 'cms_menus', 'MenusController', 1, 1, '2022-02-04 09:46:11', NULL, NULL),
  (8, 'Email Templates', 'fa fa-envelope-o', 'email_templates', 'cms_email_templates', 'EmailTemplatesController', 1, 1, '2022-02-04 09:46:11', NULL, NULL),
  (9, 'Statistic Builder', 'fa fa-dashboard', 'statistic_builder', 'cms_statistics', 'StatisticBuilderController', 1, 1, '2022-02-04 09:46:11', NULL, NULL),
  (10, 'API Generator', 'fa fa-cloud-download', 'api_generator', '', 'ApiCustomController', 1, 1, '2022-02-04 09:46:11', NULL, NULL),
  (11, 'Log User Access', 'fa fa-flag-o', 'logs', 'cms_logs', 'LogsController', 1, 1, '2022-02-04 09:46:11', NULL, NULL),
  (12, 'Device', 'fa fa-qrcode', 'device', 'device', 'AdminDeviceController', 0, 0, '2022-02-06 02:49:42', NULL, NULL),
  (13, 'Contact', 'fa fa-book', 'contact', 'contact', 'AdminContactController', 0, 0, '2022-02-06 02:53:46', NULL, NULL),
  (14, 'Outbox', 'fa fa-send', 'outbox', 'outbox', 'AdminOutboxController', 0, 0, '2022-02-06 02:59:55', NULL, NULL);
-- ----------------------------
-- Table structure for cms_notifications
-- ----------------------------
-- Per-user in-app notifications: message `content`, target `url`, and
-- an `is_read` flag.
DROP TABLE IF EXISTS `cms_notifications`;
CREATE TABLE `cms_notifications` (
  `id` int UNSIGNED NOT NULL AUTO_INCREMENT,
  `id_cms_users` int NULL DEFAULT NULL,
  `content` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `url` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `is_read` tinyint(1) NULL DEFAULT NULL,
  `created_at` timestamp NULL DEFAULT NULL,
  `updated_at` timestamp NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 1 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of cms_notifications
-- ----------------------------
-- (no seed rows)
-- ----------------------------
-- Table structure for cms_privileges
-- ----------------------------
-- Privilege groups (roles); `is_superadmin` grants full access and
-- `theme_color` selects the admin skin.
DROP TABLE IF EXISTS `cms_privileges`;
CREATE TABLE `cms_privileges` (
  `id` int UNSIGNED NOT NULL AUTO_INCREMENT,
  `name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `is_superadmin` tinyint(1) NULL DEFAULT NULL,
  `theme_color` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `created_at` timestamp NULL DEFAULT NULL,
  `updated_at` timestamp NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of cms_privileges
-- ----------------------------
-- Explicit column list so the seed keeps working if columns change.
INSERT INTO `cms_privileges`
  (`id`, `name`, `is_superadmin`, `theme_color`, `created_at`, `updated_at`)
VALUES
  (1, 'Super Administrator', 1, 'skin-red', '2022-04-15 06:06:21', NULL);
-- ----------------------------
-- Table structure for cms_privileges_roles
-- ----------------------------
-- Per-module CRUD permission matrix for each privilege group:
-- visible/create/read/edit/delete flags keyed by privilege and module.
DROP TABLE IF EXISTS `cms_privileges_roles`;
CREATE TABLE `cms_privileges_roles` (
  `id` int UNSIGNED NOT NULL AUTO_INCREMENT,
  `is_visible` tinyint(1) NULL DEFAULT NULL,
  `is_create` tinyint(1) NULL DEFAULT NULL,
  `is_read` tinyint(1) NULL DEFAULT NULL,
  `is_edit` tinyint(1) NULL DEFAULT NULL,
  `is_delete` tinyint(1) NULL DEFAULT NULL,
  `id_cms_privileges` int NULL DEFAULT NULL,
  `id_cms_moduls` int NULL DEFAULT NULL,
  `created_at` timestamp NULL DEFAULT NULL,
  `updated_at` timestamp NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 12 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of cms_privileges_roles
-- ----------------------------
-- Explicit column list + single multi-row INSERT (was 11 bare INSERTs).
INSERT INTO `cms_privileges_roles`
  (`id`, `is_visible`, `is_create`, `is_read`, `is_edit`, `is_delete`, `id_cms_privileges`, `id_cms_moduls`, `created_at`, `updated_at`)
VALUES
  (1, 1, 0, 0, 0, 0, 1, 1, '2022-04-15 06:06:21', NULL),
  (2, 1, 1, 1, 1, 1, 1, 2, '2022-04-15 06:06:21', NULL),
  (3, 0, 1, 1, 1, 1, 1, 3, '2022-04-15 06:06:21', NULL),
  (4, 1, 1, 1, 1, 1, 1, 4, '2022-04-15 06:06:21', NULL),
  (5, 1, 1, 1, 1, 1, 1, 5, '2022-04-15 06:06:21', NULL),
  (6, 1, 1, 1, 1, 1, 1, 6, '2022-04-15 06:06:21', NULL),
  (7, 1, 1, 1, 1, 1, 1, 7, '2022-04-15 06:06:21', NULL),
  (8, 1, 1, 1, 1, 1, 1, 8, '2022-04-15 06:06:21', NULL),
  (9, 1, 1, 1, 1, 1, 1, 9, '2022-04-15 06:06:21', NULL),
  (10, 1, 1, 1, 1, 1, 1, 10, '2022-04-15 06:06:21', NULL),
  (11, 1, 0, 1, 0, 1, 1, 11, '2022-04-15 06:06:21', NULL);
-- ----------------------------
-- Table structure for cms_settings
-- ----------------------------
-- Key/value application settings grouped by `group_setting`;
-- `content_input_type` drives the admin form widget and `dataenum`
-- holds the option list for selects.
DROP TABLE IF EXISTS `cms_settings`;
CREATE TABLE `cms_settings` (
  `id` int UNSIGNED NOT NULL AUTO_INCREMENT,
  `name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `content` text CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL,
  `content_input_type` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `dataenum` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `helper` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `created_at` timestamp NULL DEFAULT NULL,
  `updated_at` timestamp NULL DEFAULT NULL,
  `group_setting` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  `label` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 17 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of cms_settings
-- ----------------------------
-- Explicit column list + single multi-row INSERT (was 16 bare INSERTs).
INSERT INTO `cms_settings`
  (`id`, `name`, `content`, `content_input_type`, `dataenum`, `helper`, `created_at`, `updated_at`, `group_setting`, `label`)
VALUES
  (1, 'login_background_color', NULL, 'text', NULL, 'Input hexacode', '2022-02-04 09:46:11', NULL, 'Login Register Style', 'Login Background Color'),
  (2, 'login_font_color', NULL, 'text', NULL, 'Input hexacode', '2022-02-04 09:46:11', NULL, 'Login Register Style', 'Login Font Color'),
  (3, 'login_background_image', NULL, 'upload_image', NULL, NULL, '2022-02-04 09:46:11', NULL, 'Login Register Style', 'Login Background Image'),
  (4, 'email_sender', 'support@crudbooster.com', 'text', NULL, NULL, '2022-02-04 09:46:11', NULL, 'Email Setting', 'Email Sender'),
  (5, 'smtp_driver', 'mail', 'select', 'smtp,mail,sendmail', NULL, '2022-02-04 09:46:11', NULL, 'Email Setting', 'Mail Driver'),
  (6, 'smtp_host', '', 'text', NULL, NULL, '2022-02-04 09:46:11', NULL, 'Email Setting', 'SMTP Host'),
  (7, 'smtp_port', '25', 'text', NULL, 'default 25', '2022-02-04 09:46:11', NULL, 'Email Setting', 'SMTP Port'),
  (8, 'smtp_username', '', 'text', NULL, NULL, '2022-02-04 09:46:11', NULL, 'Email Setting', 'SMTP Username'),
  (9, 'smtp_password', '', 'text', NULL, NULL, '2022-02-04 09:46:11', NULL, 'Email Setting', 'SMTP Password'),
  (10, 'appname', 'CRUDBooster', 'text', NULL, NULL, '2022-02-04 09:46:11', NULL, 'Application Setting', 'Application Name'),
  (11, 'default_paper_size', 'Legal', 'text', NULL, 'Paper size, ex : A4, Legal, etc', '2022-02-04 09:46:11', NULL, 'Application Setting', 'Default Paper Print Size'),
  (12, 'logo', '', 'upload_image', NULL, NULL, '2022-02-04 09:46:11', NULL, 'Application Setting', 'Logo'),
  (13, 'favicon', '', 'upload_image', NULL, NULL, '2022-02-04 09:46:11', NULL, 'Application Setting', 'Favicon'),
  (14, 'api_debug_mode', 'true', 'select', 'true,false', NULL, '2022-02-04 09:46:11', NULL, 'Application Setting', 'API Debug Mode'),
  (15, 'google_api_key', '', 'text', NULL, NULL, '2022-02-04 09:46:11', NULL, 'Application Setting', 'Google API Key'),
  (16, 'google_fcm_key', '', 'text', NULL, NULL, '2022-02-04 09:46:11', NULL, 'Application Setting', 'Google FCM Key');
-- ----------------------------
-- Table structure for cms_statistic_components
-- ----------------------------
-- Widget instances placed on a statistic page; presumably one row per dashboard
-- component (id_cms_statistics is the owning page) — empty by default.
DROP TABLE IF EXISTS `cms_statistic_components`;
CREATE TABLE `cms_statistic_components` (
`id` int UNSIGNED NOT NULL AUTO_INCREMENT,
`id_cms_statistics` int NULL DEFAULT NULL,
`componentID` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`component_name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`area_name` varchar(55) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`sorting` int NULL DEFAULT NULL,
`name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`config` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL,
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 1 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of cms_statistic_components
-- ----------------------------
-- ----------------------------
-- Table structure for cms_statistics
-- ----------------------------
-- Statistic/dashboard pages addressable by slug — empty by default.
DROP TABLE IF EXISTS `cms_statistics`;
CREATE TABLE `cms_statistics` (
`id` int UNSIGNED NOT NULL AUTO_INCREMENT,
`name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`slug` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 1 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of cms_statistics
-- ----------------------------
-- ----------------------------
-- Table structure for cms_users
-- ----------------------------
-- CMS backend accounts; id_cms_privileges links a user to a privilege group.
DROP TABLE IF EXISTS `cms_users`;
CREATE TABLE `cms_users` (
`id` int UNSIGNED NOT NULL AUTO_INCREMENT,
`name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`photo` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`email` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`password` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`id_cms_privileges` int NULL DEFAULT NULL,
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL,
`status` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of cms_users
-- ----------------------------
-- Seed super-admin account; the password value is a bcrypt-style hash ($2y$10$...),
-- so do not edit it in place.
INSERT INTO `cms_users` VALUES (1, 'Super Admin', NULL, 'admin@crudbooster.com', '$2y$10$AtnJU4LKaZP6BHrgVrVecub5XRLVxMX3UdIi/6nPXJ/NRJmQCvNPm', 1, '2022-04-15 06:06:21', NULL, 'Active');
-- ----------------------------
-- Table structure for contact
-- ----------------------------
-- WhatsApp contacts owned by a user/device (id_users, id_device are plain int
-- references — no FOREIGN KEY constraints are declared in this dump).
-- deleted_at suggests Laravel-style soft deletes.
DROP TABLE IF EXISTS `contact`;
CREATE TABLE `contact` (
`id` int NOT NULL AUTO_INCREMENT,
`name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`number` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`multidevice` varchar(10) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`id_users` int NULL DEFAULT NULL,
`id_device` int NULL DEFAULT NULL,
`created_at` datetime NULL DEFAULT NULL,
`updated_at` datetime NULL DEFAULT NULL,
`deleted_at` timestamp NULL DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 1 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of contact
-- ----------------------------
-- ----------------------------
-- Table structure for device
-- ----------------------------
-- Registered WhatsApp devices/sessions per user; status and multidevice are
-- free-form varchar flags rather than enums.
DROP TABLE IF EXISTS `device`;
CREATE TABLE `device` (
`id` int NOT NULL AUTO_INCREMENT,
`id_users` int NULL DEFAULT NULL,
`number` varchar(100) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`description` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`created_at` datetime NULL DEFAULT NULL,
`updated_at` datetime NULL DEFAULT NULL,
`status` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`multidevice` varchar(10) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 1 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of device
-- ----------------------------
-- ----------------------------
-- Table structure for failed_jobs
-- ----------------------------
-- Standard Laravel failed-queue-jobs table (uuid is unique per failure record).
DROP TABLE IF EXISTS `failed_jobs`;
CREATE TABLE `failed_jobs` (
`id` bigint UNSIGNED NOT NULL AUTO_INCREMENT,
`uuid` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL,
`connection` text CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL,
`queue` text CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL,
`payload` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL,
`exception` longtext CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL,
`failed_at` timestamp NOT NULL DEFAULT current_timestamp,
PRIMARY KEY (`id`) USING BTREE,
UNIQUE INDEX `failed_jobs_uuid_unique`(`uuid` ASC) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 1 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of failed_jobs
-- ----------------------------
-- ----------------------------
-- Table structure for migrations
-- ----------------------------
-- Laravel migrations ledger: one row per applied migration file, grouped by batch.
DROP TABLE IF EXISTS `migrations`;
CREATE TABLE `migrations` (
`id` int UNSIGNED NOT NULL AUTO_INCREMENT,
`migration` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL,
`batch` int NOT NULL,
PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 28 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of migrations
-- ----------------------------
-- All 27 migrations were applied in a single batch (batch = 1); the values below
-- mirror the migration filenames and must stay in sync with the app's
-- database/migrations directory.
INSERT INTO `migrations` VALUES (1, '2016_08_07_145904_add_table_cms_apicustom', 1);
INSERT INTO `migrations` VALUES (2, '2016_08_07_150834_add_table_cms_dashboard', 1);
INSERT INTO `migrations` VALUES (3, '2016_08_07_151210_add_table_cms_logs', 1);
INSERT INTO `migrations` VALUES (4, '2016_08_07_151211_add_details_cms_logs', 1);
INSERT INTO `migrations` VALUES (5, '2016_08_07_152014_add_table_cms_privileges', 1);
INSERT INTO `migrations` VALUES (6, '2016_08_07_152214_add_table_cms_privileges_roles', 1);
INSERT INTO `migrations` VALUES (7, '2016_08_07_152320_add_table_cms_settings', 1);
INSERT INTO `migrations` VALUES (8, '2016_08_07_152421_add_table_cms_users', 1);
INSERT INTO `migrations` VALUES (9, '2016_08_07_154624_add_table_cms_menus_privileges', 1);
INSERT INTO `migrations` VALUES (10, '2016_08_07_154624_add_table_cms_moduls', 1);
INSERT INTO `migrations` VALUES (11, '2016_08_17_225409_add_status_cms_users', 1);
INSERT INTO `migrations` VALUES (12, '2016_08_20_125418_add_table_cms_notifications', 1);
INSERT INTO `migrations` VALUES (13, '2016_09_04_033706_add_table_cms_email_queues', 1);
INSERT INTO `migrations` VALUES (14, '2016_09_16_035347_add_group_setting', 1);
INSERT INTO `migrations` VALUES (15, '2016_09_16_045425_add_label_setting', 1);
INSERT INTO `migrations` VALUES (16, '2016_09_17_104728_create_nullable_cms_apicustom', 1);
INSERT INTO `migrations` VALUES (17, '2016_10_01_141740_add_method_type_apicustom', 1);
INSERT INTO `migrations` VALUES (18, '2016_10_01_141846_add_parameters_apicustom', 1);
INSERT INTO `migrations` VALUES (19, '2016_10_01_141934_add_responses_apicustom', 1);
INSERT INTO `migrations` VALUES (20, '2016_10_01_144826_add_table_apikey', 1);
INSERT INTO `migrations` VALUES (21, '2016_11_14_141657_create_cms_menus', 1);
INSERT INTO `migrations` VALUES (22, '2016_11_15_132350_create_cms_email_templates', 1);
INSERT INTO `migrations` VALUES (23, '2016_11_15_190410_create_cms_statistics', 1);
INSERT INTO `migrations` VALUES (24, '2016_11_17_102740_create_cms_statistic_components', 1);
INSERT INTO `migrations` VALUES (25, '2017_06_06_164501_add_deleted_at_cms_moduls', 1);
INSERT INTO `migrations` VALUES (26, '2019_12_14_000001_create_personal_access_tokens_table', 1);
INSERT INTO `migrations` VALUES (27, '2022_02_06_161234_create_laravel_wa_server_table', 1);
-- ----------------------------
-- Table structure for outbox
-- ----------------------------
-- Queued/sent outgoing WhatsApp messages; status is a short varchar flag and
-- there is no updated_at column on this table.
DROP TABLE IF EXISTS `outbox`;
CREATE TABLE `outbox` (
`id` int NOT NULL AUTO_INCREMENT,
`number` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`text` text CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL,
`status` varchar(5) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`created_at` datetime NULL DEFAULT NULL,
`id_device` int NULL DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 1 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of outbox
-- ----------------------------
-- ----------------------------
-- Table structure for password_resets
-- ----------------------------
-- Standard Laravel password-reset tokens; intentionally has no primary key,
-- only an index on email.
DROP TABLE IF EXISTS `password_resets`;
CREATE TABLE `password_resets` (
`email` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL,
`token` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL,
`created_at` timestamp NULL DEFAULT NULL,
INDEX `password_resets_email_index`(`email` ASC) USING BTREE
) ENGINE = InnoDB CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of password_resets
-- ----------------------------
-- ----------------------------
-- Table structure for personal_access_tokens
-- ----------------------------
-- Standard Laravel Sanctum token table; tokenable_type/tokenable_id form the
-- polymorphic owner reference.
DROP TABLE IF EXISTS `personal_access_tokens`;
CREATE TABLE `personal_access_tokens` (
`id` bigint UNSIGNED NOT NULL AUTO_INCREMENT,
`tokenable_type` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL,
`tokenable_id` bigint UNSIGNED NOT NULL,
`name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL,
`token` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL,
`abilities` text CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL,
`last_used_at` timestamp NULL DEFAULT NULL,
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE,
UNIQUE INDEX `personal_access_tokens_token_unique`(`token` ASC) USING BTREE,
INDEX `personal_access_tokens_tokenable_type_tokenable_id_index`(`tokenable_type` ASC, `tokenable_id` ASC) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 1 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of personal_access_tokens
-- ----------------------------
-- ----------------------------
-- Table structure for users
-- ----------------------------
-- Standard Laravel application users table (distinct from cms_users above).
DROP TABLE IF EXISTS `users`;
CREATE TABLE `users` (
`id` bigint UNSIGNED NOT NULL AUTO_INCREMENT,
`name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL,
`email` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL,
`email_verified_at` timestamp NULL DEFAULT NULL,
`password` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL,
`remember_token` varchar(100) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE,
UNIQUE INDEX `users_email_unique`(`email` ASC) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 1 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of users
-- ----------------------------
-- ----------------------------
-- Table structure for whatsapp_log
-- ----------------------------
-- Message send/receive audit log; tgl ("tanggal", date) defaults to the insert
-- time via current_timestamp.
DROP TABLE IF EXISTS `whatsapp_log`;
CREATE TABLE `whatsapp_log` (
`id` int NOT NULL AUTO_INCREMENT,
`number` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`message` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`session_id` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`status` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NULL DEFAULT NULL,
`tgl` datetime NULL DEFAULT current_timestamp,
PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 1 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_unicode_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- Records of whatsapp_log
-- ----------------------------
SET FOREIGN_KEY_CHECKS = 1;
-- Use PolyBase to load public data from Azure blob storage into the Wide World Importers Data Warehouse schema.
--
-- This script:
--
-- 1. Configures PolyBase for loading from a public blob storage container.
-- 2. Loads the data into columnstore indexes
-- 3. Generates millions of rows in the date dimension and sales fact tables
-- 4. Performs optimizations after the load and data generation is finished.
--
-- Before you begin:
-- To run this tutorial, you need an Azure account that already has a SQL Data Warehouse database.
-- If you don't already have this, see
-- http://azure.microsoft.com/documentation/articles/sql-data-warehouse-get-started-provision.md
--
-- You also need to have created a login and user that will be used for loading data.
-- The server admin account is meant to perform management operations, and is not suited for running queries on user data.
--
-- For more explanation about the loading process, this article on azure.microsoft.com explains the process in more detail.
-- https://docs.microsoft.com/en-us/azure/sql-data-warehouse/load-data-wideworldimportersdw
-- ----------------------------------- STEP 1: Create an external data source -----------------------------------------------------------------------------
-- TYPE: HADOOP - PolyBase uses Hadoop APIs to access data in Azure blob storage.
-- LOCATION: Azure Storage account where the World Wide Importers data set is stored
-- External data source for the public Wide World Importers blob container.
-- HADOOP: PolyBase accesses Azure blob storage through the Hadoop APIs.
CREATE EXTERNAL DATA SOURCE WWIStorage WITH (
    TYPE = HADOOP,
    LOCATION = 'wasbs://wideworldimporters@sqldwholdata.blob.core.windows.net'
);
-- External file format describing the source files: plain delimited text whose
-- fields are separated by the pipe ('|') character.
CREATE EXTERNAL FILE FORMAT TextFileFormat WITH (
    FORMAT_TYPE = DELIMITEDTEXT,
    FORMAT_OPTIONS (
        FIELD_TERMINATOR = '|',  -- column separator used throughout the data set
        USE_TYPE_DEFAULT = FALSE -- missing fields load as NULL rather than type defaults
    )
);
-- ----------------------------------STEP 2: Create schema to organize loading data and local table data -----------------------------------------------------
-- Create ext schema. It provides a way to organize the external tables you are about to create for loading data
CREATE SCHEMA ext;
GO
-- Create wwi schema. It organizes the standard tables that will contain the data.
-- (GO ends each batch: CREATE SCHEMA must be the only statement in its batch.)
CREATE SCHEMA wwi;
GO
-- ----------------------------------STEP 3: Create external table definitions --------------------------------------------------------------------------------
-- Create external tables.
-- The table definitions are stored in SQL Data Warehouse, but the tables reference data that is stored in Azure blob storage.
-- NOTE: columns are matched to fields in the '|'-delimited files by position —
-- the declared column order must not be changed.
-- REJECT_TYPE = VALUE with REJECT_VALUE = 0 makes the load fail on the first rejected row.
CREATE EXTERNAL TABLE [ext].[dimension_City](
[City Key] [int] NOT NULL,
[WWI City ID] [int] NOT NULL,
[City] [nvarchar](50) NOT NULL,
[State Province] [nvarchar](50) NOT NULL,
[Country] [nvarchar](60) NOT NULL,
[Continent] [nvarchar](30) NOT NULL,
[Sales Territory] [nvarchar](50) NOT NULL,
[Region] [nvarchar](30) NOT NULL,
[Subregion] [nvarchar](30) NOT NULL,
[Location] [nvarchar](76) NULL,
[Latest Recorded Population] [bigint] NOT NULL,
[Valid From] [datetime2](7) NOT NULL,
[Valid To] [datetime2](7) NOT NULL,
[Lineage Key] [int] NOT NULL
)
WITH (LOCATION='/v1/dimension_City/',
DATA_SOURCE = WWIStorage,
FILE_FORMAT = TextFileFormat,
REJECT_TYPE = VALUE,
REJECT_VALUE = 0
);
CREATE EXTERNAL TABLE [ext].[dimension_Customer] (
[Customer Key] [int] NOT NULL,
[WWI Customer ID] [int] NOT NULL,
[Customer] [nvarchar](100) NOT NULL,
[Bill To Customer] [nvarchar](100) NOT NULL,
[Category] [nvarchar](50) NOT NULL,
[Buying Group] [nvarchar](50) NOT NULL,
[Primary Contact] [nvarchar](50) NOT NULL,
[Postal Code] [nvarchar](10) NOT NULL,
[Valid From] [datetime2](7) NOT NULL,
[Valid To] [datetime2](7) NOT NULL,
[Lineage Key] [int] NOT NULL
)
WITH (LOCATION='/v1/dimension_Customer/',
DATA_SOURCE = WWIStorage,
FILE_FORMAT = TextFileFormat,
REJECT_TYPE = VALUE,
REJECT_VALUE = 0
);
CREATE EXTERNAL TABLE [ext].[dimension_Employee] (
[Employee Key] [int] NOT NULL,
[WWI Employee ID] [int] NOT NULL,
[Employee] [nvarchar](50) NOT NULL,
[Preferred Name] [nvarchar](50) NOT NULL,
[Is Salesperson] [bit] NOT NULL,
[Photo] [varbinary](300) NULL,
[Valid From] [datetime2](7) NOT NULL,
[Valid To] [datetime2](7) NOT NULL,
[Lineage Key] [int] NOT NULL
)
WITH ( LOCATION='/v1/dimension_Employee/',
DATA_SOURCE = WWIStorage,
FILE_FORMAT = TextFileFormat,
REJECT_TYPE = VALUE,
REJECT_VALUE = 0
);
CREATE EXTERNAL TABLE [ext].[dimension_PaymentMethod] (
[Payment Method Key] [int] NOT NULL,
[WWI Payment Method ID] [int] NOT NULL,
[Payment Method] [nvarchar](50) NOT NULL,
[Valid From] [datetime2](7) NOT NULL,
[Valid To] [datetime2](7) NOT NULL,
[Lineage Key] [int] NOT NULL
)
WITH ( LOCATION ='/v1/dimension_PaymentMethod/',
DATA_SOURCE = WWIStorage,
FILE_FORMAT = TextFileFormat,
REJECT_TYPE = VALUE,
REJECT_VALUE = 0
);
CREATE EXTERNAL TABLE [ext].[dimension_StockItem](
[Stock Item Key] [int] NOT NULL,
[WWI Stock Item ID] [int] NOT NULL,
[Stock Item] [nvarchar](100) NOT NULL,
[Color] [nvarchar](20) NOT NULL,
[Selling Package] [nvarchar](50) NOT NULL,
[Buying Package] [nvarchar](50) NOT NULL,
[Brand] [nvarchar](50) NOT NULL,
[Size] [nvarchar](20) NOT NULL,
[Lead Time Days] [int] NOT NULL,
[Quantity Per Outer] [int] NOT NULL,
[Is Chiller Stock] [bit] NOT NULL,
[Barcode] [nvarchar](50) NULL,
[Tax Rate] [decimal](18, 3) NOT NULL,
[Unit Price] [decimal](18, 2) NOT NULL,
[Recommended Retail Price] [decimal](18, 2) NULL,
[Typical Weight Per Unit] [decimal](18, 3) NOT NULL,
[Photo] [varbinary](300) NULL,
[Valid From] [datetime2](7) NOT NULL,
[Valid To] [datetime2](7) NOT NULL,
[Lineage Key] [int] NOT NULL
)
WITH ( LOCATION ='/v1/dimension_StockItem/',
DATA_SOURCE = WWIStorage,
FILE_FORMAT = TextFileFormat,
REJECT_TYPE = VALUE,
REJECT_VALUE = 0
);
CREATE EXTERNAL TABLE [ext].[dimension_Supplier](
[Supplier Key] [int] NOT NULL,
[WWI Supplier ID] [int] NOT NULL,
[Supplier] [nvarchar](100) NOT NULL,
[Category] [nvarchar](50) NOT NULL,
[Primary Contact] [nvarchar](50) NOT NULL,
[Supplier Reference] [nvarchar](20) NULL,
[Payment Days] [int] NOT NULL,
[Postal Code] [nvarchar](10) NOT NULL,
[Valid From] [datetime2](7) NOT NULL,
[Valid To] [datetime2](7) NOT NULL,
[Lineage Key] [int] NOT NULL
)
WITH ( LOCATION ='/v1/dimension_Supplier/',
DATA_SOURCE = WWIStorage,
FILE_FORMAT = TextFileFormat,
REJECT_TYPE = VALUE,
REJECT_VALUE = 0
);
CREATE EXTERNAL TABLE [ext].[dimension_TransactionType](
[Transaction Type Key] [int] NOT NULL,
[WWI Transaction Type ID] [int] NOT NULL,
[Transaction Type] [nvarchar](50) NOT NULL,
[Valid From] [datetime2](7) NOT NULL,
[Valid To] [datetime2](7) NOT NULL,
[Lineage Key] [int] NOT NULL
)
WITH ( LOCATION ='/v1/dimension_TransactionType/',
DATA_SOURCE = WWIStorage,
FILE_FORMAT = TextFileFormat,
REJECT_TYPE = VALUE,
REJECT_VALUE = 0
);
-- Fact external tables. As with the dimensions above, columns map positionally
-- to the '|'-delimited files under each /v1/ folder; do not reorder them.
CREATE EXTERNAL TABLE [ext].[fact_Movement] (
[Movement Key] [bigint] NOT NULL,
[Date Key] [date] NOT NULL,
[Stock Item Key] [int] NOT NULL,
[Customer Key] [int] NULL,
[Supplier Key] [int] NULL,
[Transaction Type Key] [int] NOT NULL,
[WWI Stock Item Transaction ID] [int] NOT NULL,
[WWI Invoice ID] [int] NULL,
[WWI Purchase Order ID] [int] NULL,
[Quantity] [int] NOT NULL,
[Lineage Key] [int] NOT NULL
)
WITH ( LOCATION ='/v1/fact_Movement/',
DATA_SOURCE = WWIStorage,
FILE_FORMAT = TextFileFormat,
REJECT_TYPE = VALUE,
REJECT_VALUE = 0
);
CREATE EXTERNAL TABLE [ext].[fact_Order] (
[Order Key] [bigint] NOT NULL,
[City Key] [int] NOT NULL,
[Customer Key] [int] NOT NULL,
[Stock Item Key] [int] NOT NULL,
[Order Date Key] [date] NOT NULL,
[Picked Date Key] [date] NULL,
[Salesperson Key] [int] NOT NULL,
[Picker Key] [int] NULL,
[WWI Order ID] [int] NOT NULL,
[WWI Backorder ID] [int] NULL,
[Description] [nvarchar](100) NOT NULL,
[Package] [nvarchar](50) NOT NULL,
[Quantity] [int] NOT NULL,
[Unit Price] [decimal](18, 2) NOT NULL,
[Tax Rate] [decimal](18, 3) NOT NULL,
[Total Excluding Tax] [decimal](18, 2) NOT NULL,
[Tax Amount] [decimal](18, 2) NOT NULL,
[Total Including Tax] [decimal](18, 2) NOT NULL,
[Lineage Key] [int] NOT NULL
)
WITH ( LOCATION ='/v1/fact_Order/',
DATA_SOURCE = WWIStorage,
FILE_FORMAT = TextFileFormat,
REJECT_TYPE = VALUE,
REJECT_VALUE = 0
);
CREATE EXTERNAL TABLE [ext].[fact_Purchase] (
[Purchase Key] [bigint] NOT NULL,
[Date Key] [date] NOT NULL,
[Supplier Key] [int] NOT NULL,
[Stock Item Key] [int] NOT NULL,
[WWI Purchase Order ID] [int] NULL,
[Ordered Outers] [int] NOT NULL,
[Ordered Quantity] [int] NOT NULL,
[Received Outers] [int] NOT NULL,
[Package] [nvarchar](50) NOT NULL,
[Is Order Finalized] [bit] NOT NULL,
[Lineage Key] [int] NOT NULL
)
WITH ( LOCATION ='/v1/fact_Purchase/',
DATA_SOURCE = WWIStorage,
FILE_FORMAT = TextFileFormat,
REJECT_TYPE = VALUE,
REJECT_VALUE = 0
);
CREATE EXTERNAL TABLE [ext].[fact_Sale] (
[Sale Key] [bigint] NOT NULL,
[City Key] [int] NOT NULL,
[Customer Key] [int] NOT NULL,
[Bill To Customer Key] [int] NOT NULL,
[Stock Item Key] [int] NOT NULL,
[Invoice Date Key] [date] NOT NULL,
[Delivery Date Key] [date] NULL,
[Salesperson Key] [int] NOT NULL,
[WWI Invoice ID] [int] NOT NULL,
[Description] [nvarchar](100) NOT NULL,
[Package] [nvarchar](50) NOT NULL,
[Quantity] [int] NOT NULL,
[Unit Price] [decimal](18, 2) NOT NULL,
[Tax Rate] [decimal](18, 3) NOT NULL,
[Total Excluding Tax] [decimal](18, 2) NOT NULL,
[Tax Amount] [decimal](18, 2) NOT NULL,
[Profit] [decimal](18, 2) NOT NULL,
[Total Including Tax] [decimal](18, 2) NOT NULL,
[Total Dry Items] [int] NOT NULL,
[Total Chiller Items] [int] NOT NULL,
[Lineage Key] [int] NOT NULL
)
WITH ( LOCATION ='/v1/fact_Sale/',
DATA_SOURCE = WWIStorage,
FILE_FORMAT = TextFileFormat,
REJECT_TYPE = VALUE,
REJECT_VALUE = 0
);
CREATE EXTERNAL TABLE [ext].[fact_StockHolding] (
[Stock Holding Key] [bigint] NOT NULL,
[Stock Item Key] [int] NOT NULL,
[Quantity On Hand] [int] NOT NULL,
[Bin Location] [nvarchar](20) NOT NULL,
[Last Stocktake Quantity] [int] NOT NULL,
[Last Cost Price] [decimal](18, 2) NOT NULL,
[Reorder Level] [int] NOT NULL,
[Target Stock Level] [int] NOT NULL,
[Lineage Key] [int] NOT NULL
)
WITH ( LOCATION ='/v1/fact_StockHolding/',
DATA_SOURCE = WWIStorage,
FILE_FORMAT = TextFileFormat,
REJECT_TYPE = VALUE,
REJECT_VALUE = 0
);
CREATE EXTERNAL TABLE [ext].[fact_Transaction] (
[Transaction Key] [bigint] NOT NULL,
[Date Key] [date] NOT NULL,
[Customer Key] [int] NULL,
[Bill To Customer Key] [int] NULL,
[Supplier Key] [int] NULL,
[Transaction Type Key] [int] NOT NULL,
[Payment Method Key] [int] NULL,
[WWI Customer Transaction ID] [int] NULL,
[WWI Supplier Transaction ID] [int] NULL,
[WWI Invoice ID] [int] NULL,
[WWI Purchase Order ID] [int] NULL,
[Supplier Invoice Number] [nvarchar](20) NULL,
[Total Excluding Tax] [decimal](18, 2) NOT NULL,
[Tax Amount] [decimal](18, 2) NOT NULL,
[Total Including Tax] [decimal](18, 2) NOT NULL,
[Outstanding Balance] [decimal](18, 2) NOT NULL,
[Is Finalized] [bit] NOT NULL,
[Lineage Key] [int] NOT NULL
)
WITH ( LOCATION ='/v1/fact_Transaction/',
DATA_SOURCE = WWIStorage,
FILE_FORMAT = TextFileFormat,
REJECT_TYPE = VALUE,
REJECT_VALUE = 0
);
-- ----------------------------------STEP 4: Load data into the external tables --------------------------------------------------------------------------------
-- The script below does not load data into the wwi.dimension_Date and wwi.fact_Sale tables.
-- You will generate the data for these tables in a later step to make sure the tables have a sizeable number of rows.
-- Each CTAS below copies an external table into a local columnstore table; the
-- LABEL makes the load visible in the DMV monitoring query of STEP 5.
-- DISTRIBUTION = REPLICATE keeps a full copy of each (small) dimension on every
-- distribution, avoiding data movement when joining to the fact tables.
CREATE TABLE [wwi].[dimension_City]
WITH
(
DISTRIBUTION = REPLICATE,
CLUSTERED COLUMNSTORE INDEX
)
AS
SELECT * FROM [ext].[dimension_City]
OPTION (LABEL = 'CTAS : Load [wwi].[dimension_City]')
;
CREATE TABLE [wwi].[dimension_Customer]
WITH
(
DISTRIBUTION = REPLICATE,
CLUSTERED COLUMNSTORE INDEX
)
AS
SELECT * FROM [ext].[dimension_Customer]
OPTION (LABEL = 'CTAS : Load [wwi].[dimension_Customer]')
;
CREATE TABLE [wwi].[dimension_Employee]
WITH
(
DISTRIBUTION = REPLICATE,
CLUSTERED COLUMNSTORE INDEX
)
AS
SELECT * FROM [ext].[dimension_Employee]
OPTION (LABEL = 'CTAS : Load [wwi].[dimension_Employee]')
;
CREATE TABLE [wwi].[dimension_PaymentMethod]
WITH
(
DISTRIBUTION = REPLICATE,
CLUSTERED COLUMNSTORE INDEX
)
AS
SELECT * FROM [ext].[dimension_PaymentMethod]
OPTION (LABEL = 'CTAS : Load [wwi].[dimension_PaymentMethod]')
;
CREATE TABLE [wwi].[dimension_StockItem]
WITH
(
DISTRIBUTION = REPLICATE,
CLUSTERED COLUMNSTORE INDEX
)
AS
SELECT * FROM [ext].[dimension_StockItem]
OPTION (LABEL = 'CTAS : Load [wwi].[dimension_StockItem]')
;
CREATE TABLE [wwi].[dimension_Supplier]
WITH
(
DISTRIBUTION = REPLICATE,
CLUSTERED COLUMNSTORE INDEX
)
AS
SELECT * FROM [ext].[dimension_Supplier]
OPTION (LABEL = 'CTAS : Load [wwi].[dimension_Supplier]')
;
CREATE TABLE [wwi].[dimension_TransactionType]
WITH
(
DISTRIBUTION = REPLICATE,
CLUSTERED COLUMNSTORE INDEX
)
AS
SELECT * FROM [ext].[dimension_TransactionType]
OPTION (LABEL = 'CTAS : Load [wwi].[dimension_TransactionType]')
;
-- Fact tables are hash-distributed on their surrogate key to spread rows evenly
-- across distributions. Note that the ext.fact_Sale data lands in
-- [wwi].[seed_Sale] (distributed on [WWI Invoice ID]) — it is the seed used later
-- to generate the much larger [wwi].[fact_Sale] table.
CREATE TABLE [wwi].[fact_Movement]
WITH
(
DISTRIBUTION = HASH([Movement Key]),
CLUSTERED COLUMNSTORE INDEX
)
AS
SELECT * FROM [ext].[fact_Movement]
OPTION (LABEL = 'CTAS : Load [wwi].[fact_Movement]')
;
CREATE TABLE [wwi].[fact_Order]
WITH
(
DISTRIBUTION = HASH([Order Key]),
CLUSTERED COLUMNSTORE INDEX
)
AS
SELECT * FROM [ext].[fact_Order]
OPTION (LABEL = 'CTAS : Load [wwi].[fact_Order]')
;
CREATE TABLE [wwi].[fact_Purchase]
WITH
(
DISTRIBUTION = HASH([Purchase Key]),
CLUSTERED COLUMNSTORE INDEX
)
AS
SELECT * FROM [ext].[fact_Purchase]
OPTION (LABEL = 'CTAS : Load [wwi].[fact_Purchase]')
;
CREATE TABLE [wwi].[seed_Sale]
WITH
(
DISTRIBUTION = HASH([WWI Invoice ID]),
CLUSTERED COLUMNSTORE INDEX
)
AS
SELECT * FROM [ext].[fact_Sale]
OPTION (LABEL = 'CTAS : Load [wwi].[seed_Sale]')
;
CREATE TABLE [wwi].[fact_StockHolding]
WITH
(
DISTRIBUTION = HASH([Stock Holding Key]),
CLUSTERED COLUMNSTORE INDEX
)
AS
SELECT * FROM [ext].[fact_StockHolding]
OPTION (LABEL = 'CTAS : Load [wwi].[fact_StockHolding]')
;
CREATE TABLE [wwi].[fact_Transaction]
WITH
(
DISTRIBUTION = HASH([Transaction Key]),
CLUSTERED COLUMNSTORE INDEX
)
AS
SELECT * FROM [ext].[fact_Transaction]
OPTION (LABEL = 'CTAS : Load [wwi].[fact_Transaction]')
;
-- ----------------------------------STEP 5: Track the progress of the load --------------------------------------------------------------------------------
-- At this point, you're loading several GBs of data into your data warehouse and compressing it into highly performant clustered columnstored indexes.
-- The query below is commented out, but you can uncomment and run it to show the status of all load operations
-- SELECT
-- r.command,
-- s.request_id,
-- r.status,
-- count(distinct input_name) as nbr_files,
-- sum(s.bytes_processed)/1024/1024/1024 as gb_processed
-- FROM
-- sys.dm_pdw_exec_requests r
-- INNER JOIN sys.dm_pdw_dms_external_work s
-- ON r.request_id = s.request_id
-- WHERE
-- r.[label] = 'CTAS : Load [wwi].[dimension_City]' OR
-- r.[label] = 'CTAS : Load [wwi].[dimension_Customer]' OR
-- r.[label] = 'CTAS : Load [wwi].[dimension_Employee]' OR
-- r.[label] = 'CTAS : Load [wwi].[dimension_PaymentMethod]' OR
-- r.[label] = 'CTAS : Load [wwi].[dimension_StockItem]' OR
-- r.[label] = 'CTAS : Load [wwi].[dimension_Supplier]' OR
-- r.[label] = 'CTAS : Load [wwi].[dimension_TransactionType]' OR
-- r.[label] = 'CTAS : Load [wwi].[fact_Movement]' OR
-- r.[label] = 'CTAS : Load [wwi].[fact_Order]' OR
-- r.[label] = 'CTAS : Load [wwi].[fact_Purchase]' OR
-- r.[label] = 'CTAS : Load [wwi].[fact_StockHolding]' OR
-- r.[label] = 'CTAS : Load [wwi].[fact_Transaction]'
-- GROUP BY
-- r.command,
-- s.request_id,
-- r.status
-- ORDER BY
-- nbr_files desc,
-- gb_processed desc;
-- ----------------------------------STEP 6: Generate millions of rows for the Date dimension table and Sales fact table --------------------------------------------------------------------------------
-- Create 'wwi.dimension_Date' and 'wwi.fact_Sale' tables.
-- dimension_Date is created empty; it is populated by the
-- PopulateDateDimensionForYear procedure defined later in this script.
CREATE TABLE [wwi].[dimension_Date]
(
[Date] [datetime] NOT NULL,
[Day Number] [int] NOT NULL,
[Day] [nvarchar](10) NOT NULL,
[Month] [nvarchar](10) NOT NULL,
[Short Month] [nvarchar](3) NOT NULL,
[Calendar Month Number] [int] NOT NULL,
[Calendar Month Label] [nvarchar](20) NOT NULL,
[Calendar Year] [int] NOT NULL,
[Calendar Year Label] [nvarchar](10) NOT NULL,
[Fiscal Month Number] [int] NOT NULL,
[Fiscal Month Label] [nvarchar](20) NOT NULL,
[Fiscal Year] [int] NOT NULL,
[Fiscal Year Label] [nvarchar](10) NOT NULL,
[ISO Week Number] [int] NOT NULL
)
WITH
(
-- Replicated, with a rowstore clustered index keyed on [Date].
DISTRIBUTION = REPLICATE,
CLUSTERED INDEX ([Date])
);
-- fact_Sale is created empty; rows are generated later from [wwi].[seed_Sale].
-- [Sale Key] is an IDENTITY here (unlike seed_Sale) so generated rows receive
-- fresh surrogate keys.
CREATE TABLE [wwi].[fact_Sale]
(
[Sale Key] [bigint] IDENTITY(1,1) NOT NULL,
[City Key] [int] NOT NULL,
[Customer Key] [int] NOT NULL,
[Bill To Customer Key] [int] NOT NULL,
[Stock Item Key] [int] NOT NULL,
[Invoice Date Key] [date] NOT NULL,
[Delivery Date Key] [date] NULL,
[Salesperson Key] [int] NOT NULL,
[WWI Invoice ID] [int] NOT NULL,
[Description] [nvarchar](100) NOT NULL,
[Package] [nvarchar](50) NOT NULL,
[Quantity] [int] NOT NULL,
[Unit Price] [decimal](18, 2) NOT NULL,
[Tax Rate] [decimal](18, 3) NOT NULL,
[Total Excluding Tax] [decimal](18, 2) NOT NULL,
[Tax Amount] [decimal](18, 2) NOT NULL,
[Profit] [decimal](18, 2) NOT NULL,
[Total Including Tax] [decimal](18, 2) NOT NULL,
[Total Dry Items] [int] NOT NULL,
[Total Chiller Items] [int] NOT NULL,
[Lineage Key] [int] NOT NULL
)
WITH
(
-- Hash on the invoice so all lines of one invoice land in the same distribution.
DISTRIBUTION = HASH ( [WWI Invoice ID] ),
CLUSTERED COLUMNSTORE INDEX
);  -- fix: statement terminator was missing (every other statement in this script ends with ';')
-- Create [wwi].[InitialSalesDataPopulation] to increase the number of rows in [wwi].[seed_Sale] by a factor of eight.
GO
-- fix: CREATE PROCEDURE must be the first statement in a batch, so the script
-- fails without a GO separator before (and after) this procedure definition.
CREATE PROCEDURE [wwi].[InitialSalesDataPopulation] AS
BEGIN
    -- Each INSERT ... SELECT re-inserts the entire current contents of seed_Sale,
    -- doubling the row count; three successive doublings give the x8 growth.
    INSERT INTO [wwi].[seed_Sale] (
        [Sale Key], [City Key], [Customer Key], [Bill To Customer Key], [Stock Item Key], [Invoice Date Key], [Delivery Date Key], [Salesperson Key], [WWI Invoice ID], [Description], [Package], [Quantity], [Unit Price], [Tax Rate], [Total Excluding Tax], [Tax Amount], [Profit], [Total Including Tax], [Total Dry Items], [Total Chiller Items], [Lineage Key]
    )
    SELECT
        [Sale Key], [City Key], [Customer Key], [Bill To Customer Key], [Stock Item Key], [Invoice Date Key], [Delivery Date Key], [Salesperson Key], [WWI Invoice ID], [Description], [Package], [Quantity], [Unit Price], [Tax Rate], [Total Excluding Tax], [Tax Amount], [Profit], [Total Including Tax], [Total Dry Items], [Total Chiller Items], [Lineage Key]
    FROM [wwi].[seed_Sale];

    INSERT INTO [wwi].[seed_Sale] (
        [Sale Key], [City Key], [Customer Key], [Bill To Customer Key], [Stock Item Key], [Invoice Date Key], [Delivery Date Key], [Salesperson Key], [WWI Invoice ID], [Description], [Package], [Quantity], [Unit Price], [Tax Rate], [Total Excluding Tax], [Tax Amount], [Profit], [Total Including Tax], [Total Dry Items], [Total Chiller Items], [Lineage Key]
    )
    SELECT
        [Sale Key], [City Key], [Customer Key], [Bill To Customer Key], [Stock Item Key], [Invoice Date Key], [Delivery Date Key], [Salesperson Key], [WWI Invoice ID], [Description], [Package], [Quantity], [Unit Price], [Tax Rate], [Total Excluding Tax], [Tax Amount], [Profit], [Total Including Tax], [Total Dry Items], [Total Chiller Items], [Lineage Key]
    FROM [wwi].[seed_Sale];

    INSERT INTO [wwi].[seed_Sale] (
        [Sale Key], [City Key], [Customer Key], [Bill To Customer Key], [Stock Item Key], [Invoice Date Key], [Delivery Date Key], [Salesperson Key], [WWI Invoice ID], [Description], [Package], [Quantity], [Unit Price], [Tax Rate], [Total Excluding Tax], [Tax Amount], [Profit], [Total Including Tax], [Total Dry Items], [Total Chiller Items], [Lineage Key]
    )
    SELECT
        [Sale Key], [City Key], [Customer Key], [Bill To Customer Key], [Stock Item Key], [Invoice Date Key], [Delivery Date Key], [Salesperson Key], [WWI Invoice ID], [Description], [Package], [Quantity], [Unit Price], [Tax Rate], [Total Excluding Tax], [Tax Amount], [Profit], [Total Including Tax], [Total Dry Items], [Total Chiller Items], [Lineage Key]
    FROM [wwi].[seed_Sale];
END
GO
-- Create this stored procedure that populates rows into wwi.dimension_Date.
-- Inserts one row per calendar day of @Year. The fiscal calendar starts in
-- November: Nov/Dec become fiscal months 1/2 of the NEXT fiscal year, and
-- Jan-Oct become fiscal months 3-12 of the current fiscal year (see the CASE
-- expressions below).
-- NOTE(review): the 'M/D/YYYY' string-to-DATE casts below depend on the
-- session language/dateformat settings -- confirm the server default applies.
CREATE PROCEDURE [wwi].[PopulateDateDimensionForYear] @Year [int] AS
BEGIN
-- #month: one row per month with its day count; February is adjusted for
-- leap years using the standard divisible-by-4/100/400 rule.
IF OBJECT_ID('tempdb..#month', 'U') IS NOT NULL
DROP TABLE #month
CREATE TABLE #month (
monthnum int,
numofdays int
)
WITH ( DISTRIBUTION = ROUND_ROBIN, heap )
INSERT INTO #month
SELECT 1, 31 UNION SELECT 2, CASE WHEN (@YEAR % 4 = 0 AND @YEAR % 100 <> 0) OR @YEAR % 400 = 0 THEN 29 ELSE 28 END UNION SELECT 3,31 UNION SELECT 4,30 UNION SELECT 5,31 UNION SELECT 6,30 UNION SELECT 7,31 UNION SELECT 8,31 UNION SELECT 9,30 UNION SELECT 10,31 UNION SELECT 11,30 UNION SELECT 12,31
-- #days: candidate day-of-month numbers 1..31 (filtered per month below)
IF OBJECT_ID('tempdb..#days', 'U') IS NOT NULL
DROP TABLE #days
CREATE TABLE #days (days int)
WITH (DISTRIBUTION = ROUND_ROBIN, HEAP)
INSERT INTO #days
SELECT 1 UNION SELECT 2 UNION SELECT 3 UNION SELECT 4 UNION SELECT 5 UNION SELECT 6 UNION SELECT 7 UNION SELECT 8 UNION SELECT 9 UNION SELECT 10 UNION SELECT 11 UNION SELECT 12 UNION SELECT 13 UNION SELECT 14 UNION SELECT 15 UNION SELECT 16 UNION SELECT 17 UNION SELECT 18 UNION SELECT 19 UNION SELECT 20 UNION SELECT 21 UNION SELECT 22 UNION SELECT 23 UNION SELECT 24 UNION SELECT 25 UNION SELECT 26 UNION SELECT 27 UNION SELECT 28 UNION SELECT 29 UNION SELECT 30 UNION SELECT 31
-- Cross join months x days, keep only days valid for each month, and derive
-- every calendar/fiscal attribute from the assembled date.
INSERT [wwi].[dimension_Date] (
[Date], [Day Number], [Day], [Month], [Short Month], [Calendar Month Number], [Calendar Month Label], [Calendar Year], [Calendar Year Label], [Fiscal Month Number], [Fiscal Month Label], [Fiscal Year], [Fiscal Year Label], [ISO Week Number]
)
SELECT
CAST(CAST(monthnum AS VARCHAR(2)) + '/' + CAST([days] AS VARCHAR(3)) + '/' + CAST(@year AS CHAR(4)) AS DATE) AS [Date]
,DAY(CAST(CAST(monthnum AS VARCHAR(2)) + '/' + CAST([days] AS VARCHAR(3)) + '/' + CAST(@year AS CHAR(4)) AS DATE)) AS [Day Number]
,CAST(DATENAME(day, CAST(CAST(monthnum AS VARCHAR(2)) + '/' + CAST([days] AS VARCHAR(3)) + '/' + CAST(@year AS CHAR(4)) AS DATE)) AS NVARCHAR(10)) AS [Day]
,CAST(DATENAME(month, CAST(CAST(monthnum AS VARCHAR(2)) + '/' + CAST([days] AS VARCHAR(3)) + '/' + CAST(@year as char(4)) AS DATE)) AS nvarchar(10)) AS [Month]
,CAST(SUBSTRING(DATENAME(month, CAST(CAST(monthnum as varchar(2)) + '/' + CAST([days] as varchar(3)) + '/' + CAST(@year as char(4)) AS DATE)), 1, 3) AS nvarchar(3)) AS [Short Month]
,MONTH(CAST(CAST(monthnum as varchar(2)) + '/' + CAST([days] as varchar(3)) + '/' + CAST(@year as char(4)) AS DATE)) AS [Calendar Month Number]
,CAST(N'CY' + CAST(YEAR(CAST(CAST(monthnum as varchar(2)) + '/' + CAST([days] as varchar(3)) + '/' + CAST(@year as char(4)) AS DATE)) AS nvarchar(4)) + N'-' + SUBSTRING(DATENAME(month, CAST(CAST(monthnum as varchar(2)) + '/' + CAST([days] as varchar(3)) + '/' + CAST(@year as char(4)) AS DATE)), 1, 3) AS nvarchar(10)) AS [Calendar Month Label]
,YEAR(CAST(CAST(monthnum as varchar(2)) + '/' + CAST([days] as varchar(3)) + '/' + CAST(@year as char(4)) AS DATE)) AS [Calendar Year]
,CAST(N'CY' + CAST(YEAR(CAST(CAST(monthnum as varchar(2)) + '/' + CAST([days] as varchar(3)) + '/' + CAST(@year as char(4)) AS DATE)) AS nvarchar(4)) AS nvarchar(10)) AS [Calendar Year Label]
-- Fiscal month: Nov (11) -> 1, Dec (12) -> 2, otherwise calendar month + 2
,CASE WHEN MONTH(CAST(CAST(monthnum as varchar(2)) + '/' + CAST([days] as varchar(3)) + '/' + CAST(@year as char(4)) AS DATE)) IN (11, 12)
THEN MONTH(CAST(CAST(monthnum as varchar(2)) + '/' + CAST([days] as varchar(3)) + '/' + CAST(@year as char(4)) AS DATE)) - 10
ELSE MONTH(CAST(CAST(monthnum as varchar(2)) + '/' + CAST([days] as varchar(3)) + '/' + CAST(@year as char(4)) AS DATE)) + 2 END AS [Fiscal Month Number]
,CAST(N'FY' + CAST(CASE WHEN MONTH(CAST(CAST(monthnum as varchar(2)) + '/' + CAST([days] as varchar(3)) + '/' + CAST(@year as char(4)) AS DATE)) IN (11, 12)
THEN YEAR(CAST(CAST(monthnum as varchar(2)) + '/' + CAST([days] as varchar(3)) + '/' + CAST(@year as char(4)) AS DATE)) + 1
ELSE YEAR(CAST(CAST(monthnum as varchar(2)) + '/' + CAST([days] as varchar(3)) + '/' + CAST(@year as char(4)) AS DATE)) END AS nvarchar(4)) + N'-' + SUBSTRING(DATENAME(month, CAST(CAST(monthnum as varchar(2)) + '/' + CAST([days] as varchar(3)) + '/' + CAST(@year as char(4)) AS DATE)), 1, 3) AS nvarchar(20)) AS [Fiscal Month Label]
-- Fiscal year: Nov/Dec belong to the following fiscal year
,CASE WHEN MONTH(CAST(CAST(monthnum as varchar(2)) + '/' + CAST([days] as varchar(3)) + '/' + CAST(@year as char(4)) AS DATE)) IN (11, 12)
THEN YEAR(CAST(CAST(monthnum as varchar(2)) + '/' + CAST([days] as varchar(3)) + '/' + CAST(@year as char(4)) AS DATE)) + 1
ELSE YEAR(CAST(CAST(monthnum as varchar(2)) + '/' + CAST([days] as varchar(3)) + '/' + CAST(@year as char(4)) AS DATE)) END AS [Fiscal Year]
,CAST(N'FY' + CAST(CASE WHEN MONTH(CAST(CAST(monthnum as varchar(2)) + '/' + CAST([days] as varchar(3)) + '/' + CAST(@year as char(4)) AS DATE)) IN (11, 12)
THEN YEAR(CAST(CAST(monthnum as varchar(2)) + '/' + CAST([days] as varchar(3)) + '/' + CAST(@year as char(4)) AS DATE)) + 1
ELSE YEAR(CAST(CAST(monthnum as varchar(2)) + '/' + CAST([days] as varchar(3)) + '/' + CAST(@year as char(4)) AS DATE))END AS nvarchar(4)) AS nvarchar(10)) AS [Fiscal Year Label]
, DATEPART(ISO_WEEK, CAST(CAST(monthnum as varchar(2)) + '/' + CAST([days] as varchar(3)) + '/' + CAST(@year as char(4)) AS DATE)) AS [ISO Week Number]
FROM #month m
CROSS JOIN #days d
WHERE d.days <= m.numofdays
DROP table #month;
DROP table #days;
END;
-- Create procedure that populates the wwi.dimension_Date and wwi.fact_Sales tables.
-- It calls [wwi].[PopulateDateDimensionForYear] to populate wwi.dimension_Date.
-- For each day of @Year, inserts roughly @EstimatedRowsPerDay rows into
-- fact_Sale by re-stamping rows copied from seed_Sale with that day's date.
CREATE PROCEDURE [wwi].[Configuration_PopulateLargeSaleTable] @EstimatedRowsPerDay [bigint],@Year [int] AS
BEGIN
SET NOCOUNT ON;
SET XACT_ABORT ON;
EXEC [wwi].[PopulateDateDimensionForYear] @Year;
DECLARE @OrderCounter bigint = 0;
DECLARE @NumberOfSalesPerDay bigint = @EstimatedRowsPerDay;
DECLARE @DateCounter date;
DECLARE @StartingSaleKey bigint;
DECLARE @MaximumSaleKey bigint = (SELECT MAX([Sale Key]) FROM wwi.seed_Sale);
DECLARE @MaxDate date;
-- Resume support: if a prior (partial) run already inserted days within the
-- target year, restart the loop at the last populated day instead of Jan 1.
SET @MaxDate = (SELECT MAX([Invoice Date Key]) FROM wwi.fact_Sale)
IF ( @MaxDate < CAST(@YEAR AS CHAR(4)) + '1231') AND (@MaxDate > CAST(@YEAR AS CHAR(4)) + '0101')
SET @DateCounter = @MaxDate
ELSE
SET @DateCounter= CAST(@Year as char(4)) + '0101';
PRINT 'Targeting ' + CAST(@NumberOfSalesPerDay AS varchar(20)) + ' sales per day.';
DECLARE @OutputCounter varchar(20);
DECLARE @variance DECIMAL(18,10);
DECLARE @VariantNumberOfSalesPerDay BIGINT;
-- NOTE(review): the '<' bound stops the loop before inserting rows for
-- Dec 31 (YYYY1231) -- confirm whether that last day is intentionally skipped.
WHILE @DateCounter < CAST(@YEAR AS CHAR(4)) + '1231'
BEGIN
SET @OutputCounter = CONVERT(varchar(20), @DateCounter, 112);
-- RAISERROR with severity 0 just emits a progress message (yyyymmdd)
RAISERROR(@OutputCounter, 0, 1);
-- Random multiplier in [0.95, 1.05) so day volumes vary around the target
SET @variance = (SELECT RAND() * 10)*.01 + .95
SET @VariantNumberOfSalesPerDay = FLOOR(@NumberOfSalesPerDay * @variance)
-- @StartingSaleKey is computed but effectively unused: the predicate that
-- consumed it is commented out below (IDENTITY semantics differ in SQL DW)
SET @StartingSaleKey = @MaximumSaleKey - @VariantNumberOfSalesPerDay - FLOOR(RAND() * 20000);
SET @OrderCounter = 0;
INSERT [wwi].[fact_Sale] (
[City Key], [Customer Key], [Bill To Customer Key], [Stock Item Key], [Invoice Date Key], [Delivery Date Key], [Salesperson Key], [WWI Invoice ID], [Description], Package, Quantity, [Unit Price], [Tax Rate], [Total Excluding Tax], [Tax Amount], Profit, [Total Including Tax], [Total Dry Items], [Total Chiller Items], [Lineage Key]
)
SELECT TOP(@VariantNumberOfSalesPerDay)
[City Key], [Customer Key], [Bill To Customer Key], [Stock Item Key], @DateCounter, DATEADD(day, 1, @DateCounter), [Salesperson Key], [WWI Invoice ID], [Description], Package, Quantity, [Unit Price], [Tax Rate], [Total Excluding Tax], [Tax Amount], Profit, [Total Including Tax], [Total Dry Items], [Total Chiller Items], [Lineage Key]
FROM [wwi].[seed_Sale]
WHERE
--[Sale Key] > @StartingSaleKey and /* IDENTITY DOES NOT WORK THE SAME IN SQLDW AND CAN'T USE THIS METHOD FOR VARIANT */
[Invoice Date Key] >=cast(@YEAR AS CHAR(4)) + '-01-01'
ORDER BY [Sale Key];
SET @DateCounter = DATEADD(day, 1, @DateCounter);
END;
END;
-- Generate millions of rows
-- NOTE(review): CREATE PROCEDURE must be alone in its batch in T-SQL, so in a
-- single-script run these EXECs need a batch separator (GO) after the
-- procedure definitions above; separators appear stripped in this copy -- confirm.
EXEC [wwi].[InitialSalesDataPopulation]
-- Populate wwi.fact_Sales with 100,000 rows per day for each day in the year 2000.
EXEC [wwi].[Configuration_PopulateLargeSaleTable] 100000, 2000
-- The data generation in the previous step might take a while as it progresses through the year.
-- To see which day the current process is on, uncomment the query below and run it:
--SELECT MAX([Invoice Date Key]) FROM wwi.fact_Sale;
-- ----------------------------------STEP 7: Populate the replicated table cache to speed up later queries --------------------------------------------------------------------------------
-- Replicated tables are cached on each Compute node, but a table's cache is
-- only built the first time a query touches it. Touch every replicated
-- dimension once now so later analytic queries skip the warm-up cost.
SELECT TOP (1) * FROM [wwi].[dimension_City];
SELECT TOP (1) * FROM [wwi].[dimension_Customer];
SELECT TOP (1) * FROM [wwi].[dimension_Date];
SELECT TOP (1) * FROM [wwi].[dimension_Employee];
SELECT TOP (1) * FROM [wwi].[dimension_PaymentMethod];
SELECT TOP (1) * FROM [wwi].[dimension_StockItem];
SELECT TOP (1) * FROM [wwi].[dimension_Supplier];
SELECT TOP (1) * FROM [wwi].[dimension_TransactionType];
-- ----------------------------------STEP 8: Create statistics on newly loaded data --------------------------------------------------------------------------------------------------
-- To achieve high query performance, it's important to create statistics on each column of each table after the first load.
-- It's also important to update statistics after substantial changes in the data.
-- Create stored procedure that updates statistics on all columns of all tables.
-- Parameters:
--   @create_type: 1 = plain CREATE STATISTICS, 2 = WITH FULLSCAN,
--                 3 = WITH SAMPLE @sample_pct PERCENT (NULL -> 1)
--   @sample_pct:  sample percentage for type 3 only (NULL -> 20)
CREATE PROCEDURE [dbo].[prc_sqldw_create_stats]
( @create_type tinyint -- 1 default 2 Fullscan 3 Sample
, @sample_pct tinyint
)
AS
IF @create_type IS NULL
BEGIN
SET @create_type = 1;
END;
IF @create_type NOT IN (1,2,3)
BEGIN
THROW 151000,'Invalid value for @stats_type parameter. Valid range 1 (default), 2 (fullscan) or 3 (sample).',1;
END;
IF @sample_pct IS NULL
BEGIN;
SET @sample_pct = 20;
END;
IF OBJECT_ID('tempdb..#stats_ddl') IS NOT NULL
BEGIN;
DROP TABLE #stats_ddl;
END;
-- Build (via CTAS) one CREATE STATISTICS statement per (table, column) that
-- has no statistics object leading on that column yet, skipping external tables.
CREATE TABLE #stats_ddl
WITH ( DISTRIBUTION = HASH([seq_nmbr])
, LOCATION = USER_DB
)
AS
WITH T
AS
(
SELECT t.[name] AS [table_name]
, s.[name] AS [table_schema_name]
, c.[name] AS [column_name]
, c.[column_id] AS [column_id]
, t.[object_id] AS [object_id]
, ROW_NUMBER()
OVER(ORDER BY (SELECT NULL)) AS [seq_nmbr]
FROM sys.[tables] t
JOIN sys.[schemas] s ON t.[schema_id] = s.[schema_id]
JOIN sys.[columns] c ON t.[object_id] = c.[object_id]
-- stats_column_id = 1 restricts the anti-join to stats whose LEADING column
-- is this column; such columns are considered already covered
LEFT JOIN sys.[stats_columns] l ON l.[object_id] = c.[object_id]
AND l.[column_id] = c.[column_id]
AND l.[stats_column_id] = 1
LEFT JOIN sys.[external_tables] e ON e.[object_id] = t.[object_id]
WHERE l.[object_id] IS NULL
AND e.[object_id] IS NULL -- not an external table
)
SELECT [table_schema_name]
, [table_name]
, [column_name]
, [column_id]
, [object_id]
, [seq_nmbr]
-- Generated stats are named stat_<schema>_<table>_<column>
, CASE @create_type
WHEN 1
THEN CAST('CREATE STATISTICS '+QUOTENAME('stat_'+table_schema_name+ '_' + table_name + '_'+column_name)+' ON '+QUOTENAME(table_schema_name)+'.'+QUOTENAME(table_name)+'('+QUOTENAME(column_name)+')' AS VARCHAR(8000))
WHEN 2
THEN CAST('CREATE STATISTICS '+QUOTENAME('stat_'+table_schema_name+ '_' + table_name + '_'+column_name)+' ON '+QUOTENAME(table_schema_name)+'.'+QUOTENAME(table_name)+'('+QUOTENAME(column_name)+') WITH FULLSCAN' AS VARCHAR(8000))
WHEN 3
THEN CAST('CREATE STATISTICS '+QUOTENAME('stat_'+table_schema_name+ '_' + table_name + '_'+column_name)+' ON '+QUOTENAME(table_schema_name)+'.'+QUOTENAME(table_name)+'('+QUOTENAME(column_name)+') WITH SAMPLE '+CONVERT(varchar(4),@sample_pct)+' PERCENT' AS VARCHAR(8000))
END AS create_stat_ddl
FROM T
;
-- Execute each generated DDL statement in sequence, printing it first
DECLARE @i INT = 1
, @t INT = (SELECT COUNT(*) FROM #stats_ddl)
, @s NVARCHAR(4000) = N''
;
WHILE @i <= @t
BEGIN
SET @s=(SELECT create_stat_ddl FROM #stats_ddl WHERE seq_nmbr = @i);
PRINT @s
EXEC sp_executesql @s
SET @i+=1;
END
DROP TABLE #stats_ddl;
-- Create statistics on all columns of all tables in the data warehouse.
-- @create_type = 1 (plain CREATE STATISTICS); passing NULL for @sample_pct
-- lets the procedure default it to 20 (only relevant for type 3).
-- NOTE(review): this EXEC must run in a separate batch (GO) from the CREATE
-- PROCEDURE above, or it would be parsed into the procedure body -- confirm
-- batch separators were stripped from this copy.
EXEC [dbo].[prc_sqldw_create_stats] 1, NULL;
-- Achievement unlocked!
--
-- You now have all the public data loaded into Azure SQL Data Warehouse. Great job!
-- Enjoy exploring with SQL Data Warehouse.
-- complain if script is sourced in psql, rather than via CREATE EXTENSION
\echo Use "CREATE EXTENSION pg_shard" to load this file. \quit
-- needed in our views
-- Converts the textual Var-node form of a partition key (as stored in
-- pg_dist_partition.partkey) back into a plain column name for the given
-- table. Implemented in C in the extension's shared library.
CREATE FUNCTION column_to_column_name(table_oid oid, column_var text)
RETURNS text
AS 'MODULE_PATHNAME'
LANGUAGE C STABLE STRICT;
-- Bootstrap pg_shard's metadata schema. Probe for CitusDB's catalog: when
-- pg_dist_partition exists, expose pgs_distribution_metadata as views over
-- the CitusDB catalogs with INSTEAD OF triggers that translate writes;
-- otherwise create standalone metadata tables owned by pg_shard itself.
DO $$
DECLARE
use_citus_metadata boolean := false;
BEGIN
-- Detect CitusDB by trying to resolve its catalog relation name
BEGIN
PERFORM 'pg_catalog.pg_dist_partition'::regclass;
use_citus_metadata = true;
EXCEPTION
WHEN undefined_table THEN
use_citus_metadata = false;
END;
IF use_citus_metadata THEN
-- Trigger functions map pg_shard's column names onto CitusDB's catalogs.
-- Shard ids default from CitusDB's own sequence when not supplied.
CREATE FUNCTION adapt_and_insert_shard() RETURNS TRIGGER AS $aais$
BEGIN
IF NEW.id IS NULL THEN
NEW.id = nextval('pg_dist_shardid_seq');
END IF;
INSERT INTO pg_dist_shard
(logicalrelid,
shardid,
shardstorage,
shardalias,
shardminvalue,
shardmaxvalue)
VALUES (NEW.relation_id,
NEW.id,
NEW.storage,
NULL,
NEW.min_value,
NEW.max_value);
RETURN NEW;
END
$aais$ LANGUAGE plpgsql;
-- Placement inserts use the catalog row's oid as the placement id
CREATE FUNCTION adapt_and_insert_shard_placement() RETURNS trigger AS $aaisp$
BEGIN
INSERT INTO pg_dist_shard_placement
(shardid,
shardstate,
shardlength,
nodename,
nodeport)
VALUES (NEW.shard_id,
NEW.shard_state,
0,
NEW.node_name,
NEW.node_port)
RETURNING oid INTO STRICT NEW.id;
RETURN NEW;
END
$aaisp$ LANGUAGE plpgsql;
-- Partition rows store the key as a textual Var node, so the plain column
-- name is converted via column_name_to_column on the way in
CREATE FUNCTION adapt_and_insert_partition() RETURNS trigger AS $aaip$
BEGIN
INSERT INTO pg_dist_partition
(logicalrelid,
partmethod,
partkey)
VALUES (NEW.relation_id,
NEW.partition_method,
column_name_to_column(NEW.relation_id, NEW.key));
RETURN NEW;
END
$aaip$ LANGUAGE plpgsql;
CREATE FUNCTION adapt_and_update_partition() RETURNS trigger AS $aaup$
BEGIN
UPDATE pg_dist_partition
SET logicalrelid = NEW.relation_id,
partmethod = NEW.partition_method,
partkey = column_name_to_column(NEW.relation_id, NEW.key)
WHERE logicalrelid = OLD.relation_id;
RETURN NEW;
END
$aaup$ LANGUAGE plpgsql;
-- metadata relations are views under CitusDB
CREATE SCHEMA pgs_distribution_metadata
CREATE VIEW shard AS
SELECT shardid AS id,
logicalrelid AS relation_id,
shardstorage AS storage,
shardminvalue AS min_value,
shardmaxvalue AS max_value
FROM pg_dist_shard
CREATE TRIGGER shard_insert INSTEAD OF INSERT ON shard
FOR EACH ROW
EXECUTE PROCEDURE adapt_and_insert_shard()
CREATE VIEW shard_placement AS
SELECT oid::bigint AS id,
shardid AS shard_id,
shardstate AS shard_state,
nodename AS node_name,
nodeport AS node_port
FROM pg_dist_shard_placement
CREATE TRIGGER shard_placement_insert INSTEAD OF INSERT ON shard_placement
FOR EACH ROW
EXECUTE PROCEDURE adapt_and_insert_shard_placement()
CREATE VIEW partition AS
SELECT logicalrelid AS relation_id,
partmethod AS partition_method,
column_to_column_name(logicalrelid, partkey) AS key
FROM pg_dist_partition
CREATE TRIGGER partition_insert INSTEAD OF INSERT ON partition
FOR EACH ROW
EXECUTE PROCEDURE adapt_and_insert_partition()
CREATE TRIGGER partition_update INSTEAD OF UPDATE ON partition
FOR EACH ROW
EXECUTE PROCEDURE adapt_and_update_partition();
ELSE
-- the pgs_distribution_metadata schema stores data distribution information
CREATE SCHEMA pgs_distribution_metadata
-- shard keeps track of hash value ranges for each shard
CREATE TABLE shard (
id bigint primary key default nextval('shard_id_sequence'),
relation_id oid not null,
storage "char" not null,
min_value text not null,
max_value text not null
)
-- shard_placement records which nodes contain which shards
CREATE TABLE shard_placement (
id bigint primary key default nextval('shard_placement_id_sequence'),
shard_id bigint not null references shard(id),
shard_state integer not null,
node_name text not null,
node_port integer not null
)
-- partition lists a partition key for each distributed table
CREATE TABLE partition (
relation_id oid unique not null,
partition_method "char" not null,
key text not null
)
-- make a few more indexes for fast access
CREATE INDEX shard_relation_index ON shard (relation_id)
CREATE INDEX shard_placement_node_name_node_port_index
ON shard_placement (node_name, node_port)
CREATE INDEX shard_placement_shard_index ON shard_placement (shard_id)
-- make sequences for shards and placements
CREATE SEQUENCE shard_id_sequence MINVALUE 10000 NO CYCLE
CREATE SEQUENCE shard_placement_id_sequence NO CYCLE;
-- associate sequences with their columns
ALTER SEQUENCE pgs_distribution_metadata.shard_id_sequence
OWNED BY pgs_distribution_metadata.shard.id;
ALTER SEQUENCE pgs_distribution_metadata.shard_placement_id_sequence
OWNED BY pgs_distribution_metadata.shard_placement.id;
-- mark each of the above as config tables to have pg_dump preserve them
PERFORM pg_catalog.pg_extension_config_dump(
'pgs_distribution_metadata.shard', '');
PERFORM pg_catalog.pg_extension_config_dump(
'pgs_distribution_metadata.shard_placement', '');
PERFORM pg_catalog.pg_extension_config_dump(
'pgs_distribution_metadata.partition', '');
END IF;
END;
$$;
-- define the table distribution functions
-- Registers table_name as distributed, partitioned on partition_column;
-- partition_method defaults to 'h' (presumably hash -- confirm in extension docs).
CREATE FUNCTION master_create_distributed_table(table_name text, partition_column text,
partition_method "char" DEFAULT 'h')
RETURNS void
AS 'MODULE_PATHNAME'
LANGUAGE C STRICT;
-- Creates shard_count shards for the table, each placed with the given
-- replication_factor (default 2 copies).
CREATE FUNCTION master_create_worker_shards(table_name text, shard_count integer,
replication_factor integer DEFAULT 2)
RETURNS void
AS 'MODULE_PATHNAME'
LANGUAGE C STRICT;
-- define the repair functions
-- Master-side entry point: repairs a shard placement by copying it from the
-- given source node to the given target node.
CREATE FUNCTION master_copy_shard_placement(shard_id bigint,
source_node_name text,
source_node_port integer,
target_node_name text,
target_node_port integer)
RETURNS void
AS 'MODULE_PATHNAME'
LANGUAGE C STRICT;
-- Worker-side counterpart: copies the named shard table from the source node
-- onto the local node.
CREATE FUNCTION worker_copy_shard_placement(table_name text, source_node_name text,
source_node_port integer)
RETURNS void
AS 'MODULE_PATHNAME'
LANGUAGE C STRICT;
-- Converter pair between plain column names and the textual Var-node form
-- CitusDB stores in pg_dist_partition.partkey:
--   partition_column_to_node_string: table's partition column -> Var string
--   column_name_to_column:           column name -> Var string
CREATE FUNCTION partition_column_to_node_string(table_oid oid)
RETURNS text
AS 'MODULE_PATHNAME'
LANGUAGE C;
CREATE FUNCTION column_name_to_column(table_oid oid, column_name text)
RETURNS text
AS 'MODULE_PATHNAME'
LANGUAGE C STABLE STRICT;
COMMENT ON FUNCTION partition_column_to_node_string(oid)
IS 'return textual form of distributed table''s partition column';
-- Syncs rows from the pg_shard distribution metadata related to the specified
-- table name into the metadata tables used by CitusDB. After a call to this
-- function for a particular pg_shard table, that table will become usable for
-- queries within CitusDB. If placement health has changed for given pg_shard
-- table, calling this function an additional time will propagate those health
-- changes to the CitusDB metadata tables.
-- table_name must resolve via regclass (schema-qualify if not on search_path).
-- Deprecated: emits a warning on every call.
CREATE FUNCTION sync_table_metadata_to_citus(table_name text)
RETURNS void
AS $sync_table_metadata_to_citus$
DECLARE
table_relation_id CONSTANT oid NOT NULL := table_name::regclass::oid;
-- pg_shard does not track shard sizes, so copied placements get length 0
dummy_shard_length CONSTANT bigint := 0;
warning_msg CONSTANT text := 'sync_table_metadata_to_citus is deprecated and ' ||
'will be removed in a future version';
BEGIN
RAISE WARNING '%', warning_msg;
-- grab lock to ensure single writer for upsert
LOCK TABLE pg_dist_shard_placement IN EXCLUSIVE MODE;
-- First, update the health of shard placement rows already copied
-- from pg_shard to CitusDB. Health is the only mutable attribute,
-- so it is presently the only one needing the UPDATE treatment.
UPDATE pg_dist_shard_placement
SET shardstate = shard_placement.shard_state
FROM pgs_distribution_metadata.shard_placement
WHERE shardid = shard_placement.shard_id AND
nodename = shard_placement.node_name AND
nodeport = shard_placement.node_port AND
shardid IN (SELECT shardid
FROM pg_dist_shard
WHERE logicalrelid = table_relation_id);
-- copy pg_shard placement rows not yet in CitusDB's metadata tables
-- (anti-join: rows where the outer join found no CitusDB counterpart)
INSERT INTO pg_dist_shard_placement
(shardid,
shardstate,
shardlength,
nodename,
nodeport)
SELECT shard_id,
shard_state,
dummy_shard_length,
node_name,
node_port
FROM pgs_distribution_metadata.shard_placement
LEFT OUTER JOIN pg_dist_shard_placement
ON ( shardid = shard_placement.shard_id AND
nodename = shard_placement.node_name AND
nodeport = shard_placement.node_port )
WHERE shardid IS NULL AND
shard_id IN (SELECT id
FROM pgs_distribution_metadata.shard
WHERE relation_id = table_relation_id);
-- copy pg_shard shard rows not yet in CitusDB's metadata tables
INSERT INTO pg_dist_shard
(shardid,
logicalrelid,
shardstorage,
shardminvalue,
shardmaxvalue)
SELECT id,
relation_id,
storage,
min_value,
max_value
FROM pgs_distribution_metadata.shard
LEFT OUTER JOIN pg_dist_shard
ON ( shardid = shard.id )
WHERE shardid IS NULL AND
relation_id = table_relation_id;
-- Finally, copy pg_shard partition rows not yet in CitusDB's metadata
-- tables. CitusDB uses a textual form of a Var node representing the
-- partition column, so we must use a special function to transform the
-- representation used by pg_shard (which is just the column name).
INSERT INTO pg_dist_partition
(logicalrelid,
partmethod,
partkey)
SELECT relation_id,
partition_method,
partition_column_to_node_string(table_relation_id)
FROM pgs_distribution_metadata.partition
LEFT OUTER JOIN pg_dist_partition
ON ( logicalrelid = partition.relation_id )
WHERE logicalrelid IS NULL AND
relation_id = table_relation_id;
END;
$sync_table_metadata_to_citus$ LANGUAGE 'plpgsql';
COMMENT ON FUNCTION sync_table_metadata_to_citus(text)
IS 'synchronize a distributed table''s pg_shard metadata to CitusDB';
-- Creates a temporary table exactly like the specified target table along with
-- a trigger to redirect any INSERTed rows from the proxy to the underlying
-- table. Users may optionally provide a sequence which will be incremented
-- after each row that has been successfully proxied (useful for counting rows
-- processed). Returns the name of the proxy table that was created.
-- The BEFORE INSERT trigger returns NULL, so proxied rows are re-routed to
-- the target and never persist in the proxy table itself.
CREATE FUNCTION create_insert_proxy_for_table(target_table regclass,
sequence regclass DEFAULT NULL)
RETURNS text
AS $create_insert_proxy_for_table$
DECLARE
temp_table_name text;
attr_names text[];
attr_list text;
param_list text;
using_list text;
insert_command text;
-- templates to create dynamic functions, tables, and triggers
-- NOTE(review): when sequence is NULL, %L renders a NULL literal and the
-- PERFORM nextval(...) should be a no-op since nextval is strict -- confirm.
func_tmpl CONSTANT text := $$CREATE FUNCTION pg_temp.copy_to_insert()
RETURNS trigger
AS $copy_to_insert$
BEGIN
EXECUTE %L USING %s;
PERFORM nextval(%L);
RETURN NULL;
END;
$copy_to_insert$ LANGUAGE plpgsql;$$;
table_tmpl CONSTANT text := $$CREATE TEMPORARY TABLE %I
(LIKE %s INCLUDING DEFAULTS)$$;
trigger_tmpl CONSTANT text := $$CREATE TRIGGER copy_to_insert
BEFORE INSERT ON %s FOR EACH ROW
EXECUTE PROCEDURE pg_temp.copy_to_insert()$$;
BEGIN
-- create name of temporary table using unqualified input table name
SELECT format('%s_insert_proxy', relname)
INTO STRICT temp_table_name
FROM pg_class
WHERE oid = target_table;
-- get list of all attributes in table, we'll need shortly
-- (user columns only: attnum > 0 excludes system columns, and dropped
-- columns are filtered out)
SELECT array_agg(attname)
INTO STRICT attr_names
FROM pg_attribute
WHERE attrelid = target_table AND
attnum > 0 AND
NOT attisdropped;
-- build fully specified column list and USING clause from attr. names
SELECT string_agg(quote_ident(attr_name), ','),
string_agg(format('NEW.%I', attr_name), ',')
INTO STRICT attr_list,
using_list
FROM unnest(attr_names) AS attr_name;
-- build ($1, $2, $3)-style VALUE list to bind parameters
SELECT string_agg('$' || param_num, ',')
INTO STRICT param_list
FROM generate_series(1, array_length(attr_names, 1)) AS param_num;
-- use the above lists to generate appropriate INSERT command
insert_command = format('INSERT INTO %s (%s) VALUES (%s)', target_table,
attr_list, param_list);
-- use the command to make one-off trigger targeting specified table
EXECUTE format(func_tmpl, insert_command, using_list, sequence);
-- create a temporary table exactly like the target table...
EXECUTE format(table_tmpl, temp_table_name, target_table);
-- ... and install the trigger on that temporary table
EXECUTE format(trigger_tmpl, quote_ident(temp_table_name)::regclass);
RETURN temp_table_name;
END;
$create_insert_proxy_for_table$ LANGUAGE plpgsql SET search_path = 'pg_catalog';
COMMENT ON FUNCTION create_insert_proxy_for_table(regclass, regclass)
IS 'create a proxy table that redirects INSERTed rows to a target table';
-- Use utf8mb4 for the client connection so multi-byte data round-trips.
SET NAMES utf8mb4;
-- Disable FK checks so tables can be dropped/recreated in any order.
-- NOTE(review): no matching SET FOREIGN_KEY_CHECKS = 1 is visible in this
-- chunk -- confirm it is restored at the end of the script.
SET FOREIGN_KEY_CHECKS = 0;
-- ----------------------------
-- Table structure for admin
-- ----------------------------
-- Back-office administrator accounts. Column COMMENT strings are Chinese;
-- English translations are given inline. Credentials are stored as a salted
-- hash split across `password` and `password_salt`.
DROP TABLE IF EXISTS `admin`;
CREATE TABLE `admin` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT '管理员编号', -- admin id
`name` varchar(10) NOT NULL COMMENT '真实名字', -- real name
`avatar` varchar(255) DEFAULT NULL COMMENT '头像', -- avatar URL
`department_id` int(11) DEFAULT NULL COMMENT '部门id', -- department id
`status` tinyint(4) NOT NULL COMMENT '在职状态', -- employment status
`username` varchar(16) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL COMMENT '登陆账号', -- login account (unique)
`password` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL COMMENT '加密后的密码', -- hashed password
`password_salt` varchar(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL COMMENT '密码的盐', -- password salt
`create_admin_id` int(11) NOT NULL COMMENT '创建管理员编号', -- id of the creating admin
`create_ip` varchar(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL COMMENT '创建 IP', -- creation IP
`create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', -- creation time
`update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '最后更新时间', -- last update time
PRIMARY KEY (`id`) USING BTREE,
UNIQUE KEY `uk_username` (`username`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=34 DEFAULT CHARSET=utf8mb4 COMMENT='管理员';
-- ----------------------------
-- Table structure for admin_department
-- ----------------------------
-- Department tree: self-referencing via `pid` (0 = root); soft-deleted via
-- the `deleted` flag rather than physical deletes.
DROP TABLE IF EXISTS `admin_department`;
CREATE TABLE `admin_department` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT '部门编号', -- department id
`name` varchar(100) COLLATE utf8mb4_bin NOT NULL COMMENT '部门名称', -- department name
`sort` int(11) NOT NULL DEFAULT '0' COMMENT '排序字段', -- sort order
`pid` int(11) NOT NULL DEFAULT '0' COMMENT '父级部门编号', -- parent department id
`create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', -- creation time
`update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', -- update time
`deleted` bit(1) NOT NULL DEFAULT b'0' COMMENT '删除标记', -- soft-delete flag
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=6 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin COMMENT='部门';
-- ----------------------------
-- Table structure for oauth2_access_token
-- ----------------------------
-- OAuth2 access tokens: the primary key `id` IS the token string itself,
-- linked back to its refresh token via `refresh_token`.
DROP TABLE IF EXISTS `oauth2_access_token`;
CREATE TABLE `oauth2_access_token` (
`id` varchar(32) NOT NULL COMMENT '访问令牌', -- the access token value
`user_id` int(11) NOT NULL COMMENT '用户编号', -- user id
`user_type` tinyint(4) NOT NULL COMMENT '用户类型', -- user type
`refresh_token` varchar(32) NOT NULL COMMENT '刷新令牌', -- owning refresh token
`expires_time` datetime NOT NULL COMMENT '过期时间', -- expiry time
`create_ip` varchar(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL COMMENT '创建 IP', -- creation IP
`create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', -- creation time
`update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '最后更新时间', -- last update time
`deleted` bit(1) NOT NULL DEFAULT b'0' COMMENT '是否删除', -- soft-delete flag
PRIMARY KEY (`id`) USING BTREE,
KEY `idx_userId` (`user_id`) USING BTREE,
KEY `idx_refreshToken` (`refresh_token`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='访问令牌';
-- ----------------------------
-- Table structure for oauth2_refresh_token
-- ----------------------------
-- OAuth2 refresh tokens: as with access tokens, the primary key `id` is the
-- refresh token string itself.
DROP TABLE IF EXISTS `oauth2_refresh_token`;
CREATE TABLE `oauth2_refresh_token` (
`id` varchar(32) NOT NULL COMMENT '编号,刷新令牌', -- the refresh token value
`user_id` int(11) NOT NULL COMMENT '用户编号', -- user id
`user_type` tinyint(4) NOT NULL COMMENT '用户类型', -- user type
`expires_time` datetime NOT NULL COMMENT '过期时间', -- expiry time
`create_ip` varchar(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL COMMENT '创建 IP', -- creation IP
`create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', -- creation time
`update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '最后更新时间', -- last update time
`deleted` bit(1) NOT NULL DEFAULT b'0' COMMENT '是否删除', -- soft-delete flag
PRIMARY KEY (`id`) USING BTREE,
KEY `idx_userId` (`user_id`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='刷新令牌';
-- ----------------------------
-- Table structure for permission_admin_role
-- ----------------------------
-- Many-to-many join table mapping administrators to roles.
DROP TABLE IF EXISTS `permission_admin_role`;
CREATE TABLE `permission_admin_role` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT '编号', -- row id
`admin_id` int(11) NOT NULL COMMENT '管理员编号', -- admin id
`role_id` int(11) NOT NULL COMMENT '角色编号', -- role id
`create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', -- creation time
`update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', -- update time
-- Fix: every sibling table declares its soft-delete flag NOT NULL; leaving it
-- nullable here would let `deleted = b'0'` filters silently skip NULL rows.
`deleted` bit(1) NOT NULL DEFAULT b'0' COMMENT '是否删除', -- soft-delete flag
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=40 DEFAULT CHARSET=utf8mb4 COMMENT='管理员角色';
-- ----------------------------
-- Table structure for permission_resource
-- ----------------------------
-- Menu/permission resource tree: self-referencing via `pid` (0 = root), with
-- frontend routing metadata (`route`, `icon`, `view`).
DROP TABLE IF EXISTS `permission_resource`;
CREATE TABLE `permission_resource` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT '资源编号', -- resource id
`name` varchar(50) NOT NULL DEFAULT '' COMMENT '菜单名', -- menu name
`permission` varchar(255) DEFAULT NULL COMMENT '权限标识', -- permission identifier
`type` int(11) NOT NULL COMMENT '资源类型', -- resource type
`sort` int(11) NOT NULL COMMENT '排序', -- sort order
`pid` int(11) NOT NULL DEFAULT '0' COMMENT '父级资源编号(外键:{@link ResourceDO#id})', -- parent resource id
`route` varchar(50) DEFAULT NULL COMMENT '前端路由', -- frontend route
`icon` varchar(50) DEFAULT NULL COMMENT '菜单图标', -- menu icon
`view` varchar(50) DEFAULT NULL COMMENT '前端界面', -- frontend view
`create_admin_id` int(11) NOT NULL COMMENT '创建管理员编号', -- id of the creating admin
`create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '添加时间', -- creation time
`update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间', -- update time
`deleted` bit(1) NOT NULL DEFAULT b'0' COMMENT '是否删除', -- soft-delete flag
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=82 DEFAULT CHARSET=utf8mb4 COMMENT='资源';
-- ----------------------------
-- Table structure for permission_role
-- ----------------------------
-- Role definitions referenced by permission_admin_role / permission_role_resource.
DROP TABLE IF EXISTS `permission_role`;
CREATE TABLE `permission_role` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT '角色编号', -- role id
`name` varchar(50) NOT NULL DEFAULT '' COMMENT '角色名', -- role name
`code` varchar(50) DEFAULT NULL COMMENT '角色编码', -- role code
`type` tinyint(4) NOT NULL COMMENT '角色类型', -- role type
`create_admin_id` int(11) NOT NULL COMMENT '创建管理员编号', -- id of the creating admin
`create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', -- creation time
-- Fix: every sibling table declares update_time NOT NULL; the DEFAULT plus
-- ON UPDATE CURRENT_TIMESTAMP means no writer ever needs to supply it.
`update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '最后更新时间', -- last update time
`deleted` bit(1) NOT NULL DEFAULT b'0' COMMENT '是否删除', -- soft-delete flag
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=17 DEFAULT CHARSET=utf8mb4 COMMENT='角色';
-- ----------------------------
-- Table structure for permission_role_resource
-- ----------------------------
-- Role <-> resource junction table (soft-deleted via `deleted`).
-- NOTE(review): no UNIQUE(role_id, resource_id) and no FK constraints are
-- declared — duplicate/orphan protection is presumably handled in the
-- application; confirm before relying on it.
DROP TABLE IF EXISTS `permission_role_resource`;
CREATE TABLE `permission_role_resource` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT '编号',
`role_id` int(11) NOT NULL DEFAULT '-1' COMMENT '角色编号(外键:{@link RoleDO}',
`resource_id` int(11) NOT NULL DEFAULT '-1' COMMENT '资源编号',
`create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
`update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间',
`deleted` bit(1) NOT NULL DEFAULT b'0' COMMENT '是否删除',
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=860 DEFAULT CHARSET=utf8mb4 COMMENT='角色资源';
-- ----------------------------
-- Table structure for system_access_log
-- ----------------------------
-- HTTP access log, one row per request. Append-only: unlike the permission
-- tables above it carries no `deleted` soft-delete column.
DROP TABLE IF EXISTS `system_access_log`;
CREATE TABLE `system_access_log` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT '编号',
`user_id` int(11) DEFAULT NULL COMMENT '用户编号',
`user_type` tinyint(4) DEFAULT NULL COMMENT '用户类型',
`trace_id` varchar(64) DEFAULT NULL COMMENT '链路追踪编号',
`application_name` varchar(50) NOT NULL COMMENT '应用名',
`uri` varchar(4096) NOT NULL DEFAULT '' COMMENT '访问地址',
`query_string` varchar(4096) NOT NULL DEFAULT '' COMMENT '参数',
`method` varchar(50) NOT NULL DEFAULT '' COMMENT 'http 方法',
`user_agent` varchar(1024) NOT NULL DEFAULT '' COMMENT 'userAgent',
`ip` varchar(50) NOT NULL DEFAULT '' COMMENT 'ip',
`start_time` datetime NOT NULL COMMENT '请求时间',
-- Response latency in milliseconds, per the column comment.
`response_time` int(11) NOT NULL COMMENT '响应时长 -- 毫秒级',
`error_code` int(11) NOT NULL COMMENT '错误码',
`error_message` varchar(512) DEFAULT NULL COMMENT '错误提示',
`create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
`update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '最后更新时间',
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=87141 DEFAULT CHARSET=utf8mb4 COMMENT='系统访问日志';
-- ----------------------------
-- Table structure for system_data_dict
-- ----------------------------
-- Data dictionary: `enum_value` names a category, `value`/`display_name`
-- are the entries within it, ordered by `sort`; soft delete via `deleted`.
DROP TABLE IF EXISTS `system_data_dict`;
CREATE TABLE `system_data_dict` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT '编号',
`enum_value` varchar(50) NOT NULL DEFAULT '' COMMENT '大类枚举值',
`value` varchar(50) NOT NULL DEFAULT '' COMMENT '小类数值',
`display_name` varchar(50) NOT NULL DEFAULT '' COMMENT '展示名',
`sort` int(11) NOT NULL DEFAULT '-1' COMMENT '排序值',
`memo` varchar(50) DEFAULT '' COMMENT '备注',
`create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
`update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '最后更新时间',
`deleted` bit(1) NOT NULL DEFAULT b'0' COMMENT '是否删除',
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=67 DEFAULT CHARSET=utf8mb4 COMMENT='数据字典';
-- ----------------------------
-- Table structure for system_error_code
-- ----------------------------
-- Error-code registry; soft delete via `deleted`.
-- Fix: drop before create, consistent with every other table in this dump.
-- Without it, re-running the script failed here with "table already exists".
DROP TABLE IF EXISTS `system_error_code`;
CREATE TABLE `system_error_code` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT '错误码编号',
`code` int(11) NOT NULL DEFAULT '0' COMMENT '错误码编码',
`message` varchar(255) NOT NULL DEFAULT '' COMMENT '错误码错误提示',
`type` tinyint(4) NOT NULL COMMENT '错误码类型',
-- `group` is a MySQL reserved word; it must stay backtick-quoted everywhere.
`group` varchar(64) NOT NULL COMMENT '错误码分组',
`memo` varchar(255) DEFAULT NULL COMMENT '错误码备注',
`create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
`update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '最后更新时间',
`deleted` bit(1) NOT NULL DEFAULT b'0' COMMENT '是否删除',
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=350 DEFAULT CHARSET=utf8mb4 COMMENT='错误码';
-- ----------------------------
-- Table structure for system_exception_log
-- ----------------------------
-- Uncaught-exception log: request context plus the exception's message,
-- root cause, stack trace and throw site. Append-only (no `deleted` column).
DROP TABLE IF EXISTS `system_exception_log`;
CREATE TABLE `system_exception_log` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT '编号',
`user_id` int(11) DEFAULT NULL COMMENT '用户编号',
`user_type` tinyint(4) DEFAULT NULL COMMENT '用户类型',
`trace_id` varchar(64) NOT NULL COMMENT '链路追踪编号\n *\n * 一般来说,通过链路追踪编号,可以将访问日志,错误日志,链路追踪日志,logger 打印日志等,结合在一起,从而进行排错。',
`application_name` varchar(50) NOT NULL COMMENT '应用名\n *\n * 目前读取 spring.application.name',
`uri` varchar(4096) NOT NULL COMMENT '访问地址',
`query_string` varchar(4096) NOT NULL COMMENT '参数',
`method` varchar(50) NOT NULL COMMENT 'http 方法',
`user_agent` varchar(1024) NOT NULL COMMENT 'userAgent',
`ip` varchar(50) NOT NULL COMMENT 'ip',
`exception_time` datetime NOT NULL COMMENT '异常发生时间',
`exception_name` varchar(128) NOT NULL DEFAULT '' COMMENT '异常名\n *\n * {@link Throwable#getClass()} 的类全名',
`exception_message` text NOT NULL COMMENT '异常导致的消息\n *\n * {@link cn.iocoder.common.framework.util.ExceptionUtil#getMessage(Throwable)}',
`exception_root_cause_message` text NOT NULL COMMENT '异常导致的根消息\n *\n * {@link cn.iocoder.common.framework.util.ExceptionUtil#getRootCauseMessage(Throwable)}',
`exception_stack_trace` text NOT NULL COMMENT '异常的栈轨迹\n *\n * {@link cn.iocoder.common.framework.util.ExceptionUtil#getServiceException(Exception)}',
`exception_class_name` varchar(512) NOT NULL COMMENT '异常发生的类全名\n *\n * {@link StackTraceElement#getClassName()}',
`exception_file_name` varchar(512) NOT NULL COMMENT '异常发生的类文件\n *\n * {@link StackTraceElement#getFileName()}',
`exception_method_name` varchar(512) NOT NULL COMMENT '异常发生的方法名\n *\n * {@link StackTraceElement#getMethodName()}',
`exception_line_number` int(11) NOT NULL COMMENT '异常发生的方法所在行\n *\n * {@link StackTraceElement#getLineNumber()}',
`create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
`update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '最后更新时间',
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=1012 DEFAULT CHARSET=utf8mb4 COMMENT='系统异常日志';
-- Re-enable foreign-key enforcement for the session after the bulk load.
-- Fix: removed a trailing "| the_stack" text artifact that was not valid SQL
-- and made this statement fail to parse.
SET FOREIGN_KEY_CHECKS = 1;
-- MySQL dump 10.13 Distrib 5.6.23, for Win32 (x86)
--
-- Host: localhost Database: weixinorder
-- ------------------------------------------------------
-- Server version 5.5.5-10.1.9-MariaDB-log
-- Standard mysqldump preamble: save the session's charset/collation/SQL-mode
-- settings, then relax time zone, unique and FK checks so the bulk INSERTs
-- below load fast and in any order. The /*!NNNNN ... */ forms are MySQL
-- conditional comments, executed only on servers of at least that version.
-- The matching restore statements appear at the end of this dump.
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
--
-- Table structure for table `new`
--
-- News table (empty in this dump — no INSERT between the KEYS toggles).
-- NOTE(review): `new` shadows common SQL keywords in some dialects, and
-- `create_at` stores the creation time as varchar(45) while sibling tables
-- use int(11) unix timestamps — consider normalizing both.
DROP TABLE IF EXISTS `new`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `new` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`title` varchar(45) NOT NULL,
`content` text NOT NULL,
`create_at` varchar(45) NOT NULL COMMENT '创建时间',
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='新闻表';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `new`
--
LOCK TABLES `new` WRITE;
/*!40000 ALTER TABLE `new` DISABLE KEYS */;
/*!40000 ALTER TABLE `new` ENABLE KEYS */;
UNLOCK TABLES;
--
-- Table structure for table `user`
--
-- Administrator account table, seeded with one account.
-- NOTE(review): the seeded `password` value is a 32-char hex string that
-- looks like an unsalted MD5 digest — confirm and consider migrating to a
-- salted/slow hash. `login_time` is a unix timestamp (int).
DROP TABLE IF EXISTS `user`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `user` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT '管理员ID',
`username` varchar(32) NOT NULL COMMENT '管理员昵称',
`mail` varchar(150) NOT NULL DEFAULT '' COMMENT '管理员邮箱',
`tel` varchar(150) NOT NULL COMMENT '管理员电话',
`password` varchar(35) NOT NULL COMMENT '管理员登录密码',
`login_time` int(11) NOT NULL DEFAULT '0' COMMENT '管理员登录时间',
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8 COMMENT='管理员表';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `user`
--
LOCK TABLES `user` WRITE;
/*!40000 ALTER TABLE `user` DISABLE KEYS */;
INSERT INTO `user` VALUES (1,'ppoo','jiaying.yang@qq.com','13250150526','e10adc3949ba59abbe56e057f20f883e',1489323209);
/*!40000 ALTER TABLE `user` ENABLE KEYS */;
UNLOCK TABLES;
--
-- Table structure for table `w_a_info`
--
-- Restaurant profile table; `admin_id` is a logical reference to the owning
-- administrator (no FK constraint declared). `create_at`/`update_at` are
-- unix timestamps (int).
DROP TABLE IF EXISTS `w_a_info`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `w_a_info` (
`room_id` int(11) NOT NULL AUTO_INCREMENT COMMENT '餐厅ID',
`room_name` varchar(45) NOT NULL COMMENT '餐厅名称',
`room_addr` varchar(150) NOT NULL DEFAULT '' COMMENT '餐厅地址',
`table_number` int(11) NOT NULL DEFAULT '0' COMMENT '餐桌数量表',
`admin_id` int(11) NOT NULL COMMENT '餐厅所属的管理员',
`create_at` int(11) NOT NULL DEFAULT '0' COMMENT '创建时间',
`update_at` int(11) NOT NULL DEFAULT '0' COMMENT '修改时间',
PRIMARY KEY (`room_id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8 COMMENT='餐厅基本信息设置表';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `w_a_info`
--
LOCK TABLES `w_a_info` WRITE;
/*!40000 ALTER TABLE `w_a_info` DISABLE KEYS */;
-- NOTE(review): seeded admin_id is 41, but the only `user` row has id 1 —
-- verify this reference.
INSERT INTO `w_a_info` VALUES (1,'仙庙烧鸡','广州市荔湾区汾水',50,41,1487949198,1488086016);
/*!40000 ALTER TABLE `w_a_info` ENABLE KEYS */;
UNLOCK TABLES;
--
-- Table structure for table `w_info`
--
-- Push-message table (empty in this dump). `create_at` is a unix timestamp.
DROP TABLE IF EXISTS `w_info`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `w_info` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT '推送消息ID',
`title` varchar(150) NOT NULL DEFAULT '' COMMENT '推送消息标题',
`content` text NOT NULL COMMENT '推送消息内容',
`create_at` int(11) NOT NULL DEFAULT '0' COMMENT '推送消息时间',
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='消息推送表';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `w_info`
--
LOCK TABLES `w_info` WRITE;
/*!40000 ALTER TABLE `w_info` DISABLE KEYS */;
/*!40000 ALTER TABLE `w_info` ENABLE KEYS */;
UNLOCK TABLES;
--
-- Table structure for table `w_menu`
--
-- Dish (menu item) table; `menu_type` references `w_menu_type`.`id`
-- (logical FK, no constraint declared); soft delete via `is_delete`.
-- NOTE(review): `price` is a plain int — unit (yuan vs. cents) is not
-- recorded anywhere visible; confirm before doing arithmetic on it.
DROP TABLE IF EXISTS `w_menu`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `w_menu` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT '菜式ID',
`menu_name` varchar(45) NOT NULL DEFAULT '' COMMENT '菜式名称',
`price` int(11) NOT NULL DEFAULT '0' COMMENT '单价',
`menu_type` int(11) NOT NULL DEFAULT '1' COMMENT '菜式分类ID,默认是1',
`menu_number` int(11) NOT NULL DEFAULT '0' COMMENT '菜式数量',
`menu_logo` varchar(45) NOT NULL DEFAULT '' COMMENT '菜式logo图片',
`update_at` int(11) NOT NULL DEFAULT '0' COMMENT '修改时间(时间戳)',
`create_at` int(11) NOT NULL DEFAULT '0' COMMENT '创建时间(时间戳)',
`is_delete` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否已经删除(0没有删除,1表示删除)',
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=7 DEFAULT CHARSET=utf8 COMMENT='门店菜单表';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `w_menu`
--
LOCK TABLES `w_menu` WRITE;
/*!40000 ALTER TABLE `w_menu` DISABLE KEYS */;
INSERT INTO `w_menu` VALUES (1,'水煮牛肉',30,1,20,'uploads/58a7b4e5172e1.jpg',1487385829,1485928662,0),(2,'化州糖水',30,5,20,'uploads/58a703411b92e.jpg',1487340353,1485928685,0),(3,'台湾蛋蒲(人气小吃)',15,5,30,'uploads/58a7026c9acb9.jpg',1487340140,1487340140,0),(4,'红烧乳猪',30,1,20,'uploads/58a7b2289d3bb.jpg',1487385128,1487385128,0),(5,'清远鸡',50,1,50,'uploads/58a7b2441c57a.jpg',1487385156,1487385156,0),(6,'红烧虾仁',36,1,30,'uploads/58a7b4afb0c2c.jpg',1487385775,1487385609,0);
/*!40000 ALTER TABLE `w_menu` ENABLE KEYS */;
UNLOCK TABLES;
--
-- Table structure for table `w_menu_type`
--
-- Dish category lookup table; soft delete via `is_delete`.
DROP TABLE IF EXISTS `w_menu_type`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `w_menu_type` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT '分类ID',
`type_name` varchar(45) NOT NULL COMMENT '分类名称',
`is_delete` tinyint(4) NOT NULL DEFAULT '0' COMMENT '是否删除(0没有删除,1已经删除)',
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=13 DEFAULT CHARSET=utf8 COMMENT='菜式分类表';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `w_menu_type`
--
LOCK TABLES `w_menu_type` WRITE;
/*!40000 ALTER TABLE `w_menu_type` DISABLE KEYS */;
INSERT INTO `w_menu_type` VALUES (1,'粤菜',0),(2,'点心',0),(3,'食堂',0),(4,'小吃',0),(5,'台湾菜',0),(6,'甜品',0),(7,'校园美食',0),(8,'小炒',0),(9,'推荐',0),(10,'面食',0),(11,'水果',0),(12,'测试',0);
/*!40000 ALTER TABLE `w_menu_type` ENABLE KEYS */;
UNLOCK TABLES;
--
-- Table structure for table `w_order`
--
-- Order header table; one row per table-side order, keyed by a randomly
-- generated varchar(32) id. Line items live in `w_order_detail`.
-- Fix: dropped the redundant `UNIQUE KEY id (id)` — it duplicated the
-- primary key exactly (a PK is already unique), wasting an index.
DROP TABLE IF EXISTS `w_order`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `w_order` (
`id` varchar(32) NOT NULL COMMENT '订单ID,随机生成',
`user_id` int(11) NOT NULL COMMENT '客户ID',
`order_status` int(11) NOT NULL DEFAULT '0' COMMENT '订单状态(0等待接单,1商户已收单)',
`order_time` int(11) NOT NULL DEFAULT '0' COMMENT '订单生成时间',
`pay_status` int(11) NOT NULL DEFAULT '0' COMMENT '支付状态(0未支付,1支付完成)',
`pay_time` int(11) NOT NULL DEFAULT '0' COMMENT '支付时间(时间戳)',
`total_price` int(11) NOT NULL DEFAULT '0' COMMENT '订单的总价',
`table_number` tinyint(4) NOT NULL COMMENT '订单所在的桌子编号',
`remark` varchar(150) NOT NULL DEFAULT '' COMMENT '订单备注',
`people` int(4) NOT NULL DEFAULT '0' COMMENT '就餐人数',
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='订单表(下单表)';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `w_order`
--
LOCK TABLES `w_order` WRITE;
/*!40000 ALTER TABLE `w_order` DISABLE KEYS */;
INSERT INTO `w_order` VALUES ('1456972',1,1,1489205813,1,1489205815,60,15,'不要辣',5),('5648795',1,0,1489205813,1,1489205815,70,14,'不要辣',4);
/*!40000 ALTER TABLE `w_order` ENABLE KEYS */;
UNLOCK TABLES;
--
-- Table structure for table `w_order_detail`
--
-- Order line items: one row per (order, dish) with the quantity and the
-- unit price captured at order time.
-- NOTE(review): `order_id` is int(11) but `w_order`.`id` is varchar(32) —
-- joining them relies on implicit type coercion, which can bypass the index
-- on `w_order`.`id`; confirm and align the types.
DROP TABLE IF EXISTS `w_order_detail`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `w_order_detail` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'ID',
`order_id` int(11) NOT NULL COMMENT '订单ID',
`menu_id` int(11) NOT NULL COMMENT '菜式ID',
`menu_number` int(11) NOT NULL DEFAULT '0' COMMENT '菜式数量',
`menu_price` int(11) NOT NULL DEFAULT '0' COMMENT '菜式单价',
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=5 DEFAULT CHARSET=utf8 COMMENT='订单详情表';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `w_order_detail`
--
LOCK TABLES `w_order_detail` WRITE;
/*!40000 ALTER TABLE `w_order_detail` DISABLE KEYS */;
INSERT INTO `w_order_detail` VALUES (1,1456972,1,1,30),(2,1456972,2,1,30),(3,5648795,4,1,30),(4,5648795,6,1,36);
/*!40000 ALTER TABLE `w_order_detail` ENABLE KEYS */;
UNLOCK TABLES;
--
-- Table structure for table `w_table`
--
-- Dining-table registry: each table's payment QR-code image path and URL
-- (empty in this dump).
DROP TABLE IF EXISTS `w_table`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `w_table` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT '桌子编号ID',
`table_qrcode_pic` varchar(150) NOT NULL COMMENT '桌子对应的二维码支付图片',
`table_qrcode_url` varchar(150) NOT NULL COMMENT '桌子对应的二维码支付连接',
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='门店桌子编号表';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `w_table`
--
LOCK TABLES `w_table` WRITE;
/*!40000 ALTER TABLE `w_table` DISABLE KEYS */;
/*!40000 ALTER TABLE `w_table` ENABLE KEYS */;
UNLOCK TABLES;
--
-- Table structure for table `w_user`
--
-- WeChat customer table, keyed by AUTO_INCREMENT id; `openid`/`unionid`
-- are the WeChat identifiers.
-- NOTE(review): `nickName`/`avatarUrl` break the file's snake_case
-- convention, and varchar(45) looks too short for avatar URLs — the seeded
-- value appears truncated; verify both against the application code.
DROP TABLE IF EXISTS `w_user`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `w_user` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT '客户id',
`nickName` varchar(45) NOT NULL COMMENT '微信名称',
`avatarUrl` varchar(45) NOT NULL COMMENT '微信头像',
`gender` tinyint(1) NOT NULL DEFAULT '0' COMMENT '性别 0:未知、1:男、2:女',
`credit` int(11) NOT NULL DEFAULT '0' COMMENT '客户积分',
`openid` varchar(45) NOT NULL COMMENT '用户微信标识,公众号获取',
`unionid` varchar(50) NOT NULL DEFAULT '' COMMENT '微信跟小程序通用的用户ID',
`create_at` int(11) NOT NULL DEFAULT '0' COMMENT '创建时间',
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8 COMMENT='客户表';
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `w_user`
--
LOCK TABLES `w_user` WRITE;
/*!40000 ALTER TABLE `w_user` DISABLE KEYS */;
INSERT INTO `w_user` VALUES (1,'嘉颖YJY','http://wx.qlogo.cn/mmopen/qiaEh4ooRLe0oqRqeBJ',1,0,'oP4OrvwxfELTrc-F__Sie7L8rsV0','',1489203843);
/*!40000 ALTER TABLE `w_user` ENABLE KEYS */;
UNLOCK TABLES;
-- Standard mysqldump epilogue: restore the session variables saved in the
-- preamble (time zone, SQL mode, FK/unique checks, charset, notes).
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-- Dump completed on 2017-05-08 22:01:40
--
-- PostgreSQL database dump
--
-- Dumped from database version 10.7
-- Dumped by pg_dump version 10.10 (Ubuntu 10.10-0ubuntu0.18.04.1)
--
-- Name: assignment; Type: TABLE; Schema: public; Owner: -
--
-- Texter-to-campaign assignment. pg_dump emits id defaults and constraints
-- separately, later in the dump, so none appear inline here.
-- NOTE(review): `max_contacts` NULL presumably means "no cap" — confirm
-- against the application.
CREATE TABLE public.assignment (
    id integer NOT NULL,
    user_id integer NOT NULL,
    campaign_id integer NOT NULL,
    created_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL,
    max_contacts integer
);
--
-- Name: assignment_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
-- Id sequence for assignment; OWNED BY ties its lifecycle to the column.
CREATE SEQUENCE public.assignment_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: assignment_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE public.assignment_id_seq OWNED BY public.assignment.id;
--
-- Name: campaign; Type: TABLE; Schema: public; Owner: -
--
-- Campaign table. Texting-hours columns here can override the
-- organization-level settings (see public.organization below) via
-- override_organization_texting_hours.
-- NOTE(review): the boolean flags (is_started, is_archived,
-- use_dynamic_assignment) are nullable with no default — callers apparently
-- must treat NULL as false; confirm.
CREATE TABLE public.campaign (
    id integer NOT NULL,
    organization_id integer NOT NULL,
    creator_id integer,
    title text DEFAULT ''::text NOT NULL,
    description text DEFAULT ''::text NOT NULL,
    is_started boolean,
    due_by timestamp with time zone,
    created_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL,
    is_archived boolean,
    use_dynamic_assignment boolean,
    logo_image_url text,
    intro_html text,
    primary_color text,
    override_organization_texting_hours boolean DEFAULT false,
    texting_hours_enforced boolean DEFAULT true,
    texting_hours_start integer DEFAULT 9,
    texting_hours_end integer DEFAULT 21,
    timezone text DEFAULT 'US/Eastern'::text,
    messaging_service_sid character varying(255)
);
--
-- Name: campaign_contact; Type: TABLE; Schema: public; Owner: -
--
-- One row per contact per campaign. `custom_fields` defaults to the string
-- '{}' (JSON stored as text, presumably — confirm); `message_status`
-- defaults to 'needsMessage'.
CREATE TABLE public.campaign_contact (
    id integer NOT NULL,
    campaign_id integer NOT NULL,
    assignment_id integer,
    external_id text DEFAULT ''::text NOT NULL,
    first_name text DEFAULT ''::text NOT NULL,
    last_name text DEFAULT ''::text NOT NULL,
    cell text NOT NULL,
    zip text DEFAULT ''::text NOT NULL,
    custom_fields text DEFAULT '{}'::text NOT NULL,
    created_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL,
    updated_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL,
    message_status text DEFAULT 'needsMessage'::text NOT NULL,
    is_opted_out boolean DEFAULT false,
    timezone_offset text DEFAULT ''::text,
    has_unresolved_tags boolean DEFAULT false,
    external_id_type character varying(255) DEFAULT NULL::character varying,
    state_code character varying(255) DEFAULT NULL::character varying
);
--
-- Name: campaign_contact_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
-- Id sequence for campaign_contact; OWNED BY ties it to the column.
CREATE SEQUENCE public.campaign_contact_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: campaign_contact_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE public.campaign_contact_id_seq OWNED BY public.campaign_contact.id;
--
-- Name: campaign_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
-- Id sequence for public.campaign (defined earlier); OWNED BY ties its
-- lifecycle to the column so it is dropped with the table.
CREATE SEQUENCE public.campaign_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: campaign_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE public.campaign_id_seq OWNED BY public.campaign.id;
--
-- Name: canned_response; Type: TABLE; Schema: public; Owner: -
--
-- Per-campaign canned response texts; `user_id` is nullable (org-level
-- responses, presumably — confirm against the application).
CREATE TABLE public.canned_response (
    id integer NOT NULL,
    campaign_id integer NOT NULL,
    text text NOT NULL,
    title text NOT NULL,
    user_id integer,
    created_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL,
    survey_question character varying(255) DEFAULT NULL::character varying
);
--
-- Name: canned_response_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
-- Id sequence for canned_response; OWNED BY ties it to the column.
CREATE SEQUENCE public.canned_response_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: canned_response_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE public.canned_response_id_seq OWNED BY public.canned_response.id;
--
-- Name: interaction_step; Type: TABLE; Schema: public; Owner: -
--
-- Campaign script tree: rows self-reference through
-- parent_interaction_id (nullable = root step); soft delete via is_deleted.
CREATE TABLE public.interaction_step (
    id integer NOT NULL,
    campaign_id integer NOT NULL,
    question text DEFAULT ''::text NOT NULL,
    script text DEFAULT ''::text NOT NULL,
    created_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL,
    parent_interaction_id integer,
    answer_option text DEFAULT ''::text NOT NULL,
    answer_actions text DEFAULT ''::text NOT NULL,
    is_deleted boolean DEFAULT false NOT NULL
);
--
-- Name: interaction_step_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
-- Id sequence for interaction_step; OWNED BY ties it to the column.
CREATE SEQUENCE public.interaction_step_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: interaction_step_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE public.interaction_step_id_seq OWNED BY public.interaction_step.id;
--
-- Name: invite; Type: TABLE; Schema: public; Owner: -
--
-- Organization invite tokens: `hash` is the invite token, `is_valid`
-- marks whether it can still be redeemed.
CREATE TABLE public.invite (
    id integer NOT NULL,
    is_valid boolean NOT NULL,
    hash text,
    created_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL
);
--
-- Name: invite_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
-- Id sequence for invite; OWNED BY ties it to the column.
CREATE SEQUENCE public.invite_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: invite_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE public.invite_id_seq OWNED BY public.invite.id;
--
-- Name: job_request; Type: TABLE; Schema: public; Owner: -
--
-- Background-job queue row: serialized `payload`, queue/type routing,
-- integer `status` (0 default) and `assigned`/`locks_queue` worker flags.
CREATE TABLE public.job_request (
    id integer NOT NULL,
    campaign_id integer NOT NULL,
    payload text NOT NULL,
    queue_name text NOT NULL,
    job_type text NOT NULL,
    result_message text DEFAULT ''::text,
    locks_queue boolean DEFAULT false,
    assigned boolean DEFAULT false,
    status integer DEFAULT 0,
    updated_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL,
    created_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL
);
--
-- Name: job_request_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
-- Id sequence for job_request; OWNED BY ties it to the column.
CREATE SEQUENCE public.job_request_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: job_request_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE public.job_request_id_seq OWNED BY public.job_request.id;
--
-- Name: log; Type: TABLE; Schema: public; Owner: -
--
-- Raw message-service log keyed by message_sid; `body` holds the payload.
CREATE TABLE public.log (
    id integer NOT NULL,
    message_sid text NOT NULL,
    body text,
    created_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL
);
--
-- Name: log_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
-- Id sequence for log; OWNED BY ties it to the column.
CREATE SEQUENCE public.log_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: log_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE public.log_id_seq OWNED BY public.log.id;
--
-- Name: message; Type: TABLE; Schema: public; Owner: -
--
-- Individual SMS rows in both directions (is_from_contact) with delivery
-- lifecycle timestamps (queued_at / sent_at / service_response_at).
-- NOTE(review): those three timestamps default to CURRENT_TIMESTAMP NOT NULL,
-- so rows appear "sent" at insert time until explicitly updated — confirm
-- the application always overwrites them.
CREATE TABLE public.message (
    id integer NOT NULL,
    user_id integer,
    user_number text DEFAULT ''::text NOT NULL,
    contact_number text NOT NULL,
    is_from_contact boolean NOT NULL,
    text text DEFAULT ''::text NOT NULL,
    service_response text DEFAULT ''::text NOT NULL,
    assignment_id integer NOT NULL,
    service text DEFAULT ''::text NOT NULL,
    service_id text DEFAULT ''::text NOT NULL,
    send_status text NOT NULL,
    created_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL,
    queued_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL,
    sent_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL,
    service_response_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL,
    send_before timestamp with time zone,
    messaging_service_sid character varying(255),
    canned_response_id integer
);
--
-- Name: message_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
-- Id sequence for message; OWNED BY ties it to the column.
CREATE SEQUENCE public.message_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: message_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE public.message_id_seq OWNED BY public.message.id;
--
-- Name: migrations; Type: TABLE; Schema: public; Owner: -
--
-- CREATE TABLE public.migrations (
-- id integer NOT NULL,
-- completed integer NOT NULL
-- );
-- --
-- -- Name: migrations_id_seq; Type: SEQUENCE; Schema: public; Owner: -
-- --
-- CREATE SEQUENCE public.migrations_id_seq
-- AS integer
-- START WITH 1
-- INCREMENT BY 1
-- NO MINVALUE
-- NO MAXVALUE
-- CACHE 1;
--
-- Name: migrations_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
-- ALTER SEQUENCE public.migrations_id_seq OWNED BY public.migrations.id; --
--
-- Name: opt_out; Type: TABLE; Schema: public; Owner: -
--
-- Per-organization opt-out list keyed by phone number (`cell`);
-- `assignment_id` is nullable (admin/import opt-outs, presumably — confirm).
CREATE TABLE public.opt_out (
    id integer NOT NULL,
    cell text NOT NULL,
    assignment_id integer,
    organization_id integer NOT NULL,
    reason_code text DEFAULT ''::text NOT NULL,
    created_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL
);
--
-- Name: opt_out_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
-- Id sequence for opt_out; OWNED BY ties it to the column.
CREATE SEQUENCE public.opt_out_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: opt_out_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE public.opt_out_id_seq OWNED BY public.opt_out.id;
--
-- Name: organization; Type: TABLE; Schema: public; Owner: -
--
-- Organization table; texting-hours defaults here can be overridden
-- per-campaign (see public.campaign). `features` is free-form text.
CREATE TABLE public.organization (
    id integer NOT NULL,
    uuid text,
    name text NOT NULL,
    created_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL,
    features text DEFAULT ''::text,
    texting_hours_enforced boolean DEFAULT false,
    texting_hours_start integer DEFAULT 9,
    texting_hours_end integer DEFAULT 21
);
--
-- Name: organization_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
-- Id sequence for organization; OWNED BY ties it to the column.
CREATE SEQUENCE public.organization_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: organization_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE public.organization_id_seq OWNED BY public.organization.id;
--
-- Name: pending_message_part; Type: TABLE; Schema: public; Owner: -
--
-- Holding area for multipart inbound messages; parts share a `parent_id`
-- until reassembled, presumably — confirm against the message handler.
CREATE TABLE public.pending_message_part (
    id integer NOT NULL,
    service text NOT NULL,
    service_id text NOT NULL,
    parent_id text DEFAULT ''::text,
    service_message text NOT NULL,
    user_number text DEFAULT ''::text NOT NULL,
    contact_number text NOT NULL,
    created_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL
);
--
-- Name: pending_message_part_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
-- Id sequence for pending_message_part; OWNED BY ties it to the column.
CREATE SEQUENCE public.pending_message_part_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: pending_message_part_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE public.pending_message_part_id_seq OWNED BY public.pending_message_part.id;
--
-- Name: question_response; Type: TABLE; Schema: public; Owner: -
--
-- A contact's answer (`value`) to one interaction_step question.
CREATE TABLE public.question_response (
    id integer NOT NULL,
    campaign_contact_id integer NOT NULL,
    interaction_step_id integer NOT NULL,
    value text NOT NULL,
    created_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL
);
--
-- Name: question_response_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
-- Id sequence for question_response; OWNED BY ties it to the column.
CREATE SEQUENCE public.question_response_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: question_response_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE public.question_response_id_seq OWNED BY public.question_response.id;
--
-- Name: tag; Type: TABLE; Schema: public; Owner: -
--
-- Free-text tag on a campaign contact, with who created it and, when
-- resolved, who resolved it and when (resolved_* NULL = still open).
CREATE TABLE public.tag (
    id integer NOT NULL,
    campaign_contact_id integer NOT NULL,
    tag text NOT NULL,
    created_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL,
    created_by integer,
    resolved_at timestamp with time zone,
    resolved_by integer
);
--
-- Name: tag_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
-- Id sequence for tag; OWNED BY ties it to the column.
CREATE SEQUENCE public.tag_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: tag_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE public.tag_id_seq OWNED BY public.tag.id;
--
-- Name: user; Type: TABLE; Schema: public; Owner: -
--
-- Account table. "user" is a reserved word in PostgreSQL, so every
-- reference must keep the double quotes (public."user").
CREATE TABLE public."user" (
    id integer NOT NULL,
    auth0_id text NOT NULL,
    first_name text NOT NULL,
    last_name text NOT NULL,
    cell text NOT NULL,
    email text NOT NULL,
    created_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL,
    assigned_cell text,
    is_superadmin boolean,
    terms boolean DEFAULT false
);
--
-- Name: user_cell; Type: TABLE; Schema: public; Owner: -
--
-- Phone numbers owned by a user; `is_primary` marks the preferred one.
CREATE TABLE public.user_cell (
    id integer NOT NULL,
    cell text NOT NULL,
    user_id integer NOT NULL,
    service text,
    is_primary boolean
);
--
-- Name: user_cell_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
-- Id sequence for user_cell; OWNED BY ties it to the column.
CREATE SEQUENCE public.user_cell_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: user_cell_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE public.user_cell_id_seq OWNED BY public.user_cell.id;
--
-- Name: user_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
-- Id sequence for public."user" (defined earlier); OWNED BY ties its
-- lifecycle to the column so it is dropped with the table.
CREATE SEQUENCE public.user_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: user_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE public.user_id_seq OWNED BY public."user".id;
--
-- Name: user_organization; Type: TABLE; Schema: public; Owner: -
--
-- User <-> organization membership with a text `role`.
CREATE TABLE public.user_organization (
    id integer NOT NULL,
    user_id integer NOT NULL,
    organization_id integer NOT NULL,
    role text NOT NULL
);
--
-- Name: user_organization_id_seq; Type: SEQUENCE; Schema: public; Owner: -
--
-- Id sequence for user_organization; OWNED BY ties it to the column.
CREATE SEQUENCE public.user_organization_id_seq
    AS integer
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
--
-- Name: user_organization_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
--
ALTER SEQUENCE public.user_organization_id_seq OWNED BY public.user_organization.id;
--
-- Name: zip_code; Type: TABLE; Schema: public; Owner: -
--
-- ZIP reference data keyed by `zip` (no id column / no sequence);
-- timezone_offset is fractional hours (real), has_dst flags daylight saving.
CREATE TABLE public.zip_code (
    zip text NOT NULL,
    city text NOT NULL,
    state text NOT NULL,
    latitude real NOT NULL,
    longitude real NOT NULL,
    timezone_offset real NOT NULL,
    has_dst boolean NOT NULL
);
--
-- Name: assignment id; Type: DEFAULT; Schema: public; Owner: -
--
-- pg_dump wires each table's id column to its sequence here, after all
-- CREATE TABLE statements. The migrations default stays commented out to
-- match the commented-out migrations table earlier in the dump.
ALTER TABLE ONLY public.assignment ALTER COLUMN id SET DEFAULT nextval('public.assignment_id_seq'::regclass);
--
-- Name: campaign id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.campaign ALTER COLUMN id SET DEFAULT nextval('public.campaign_id_seq'::regclass);
--
-- Name: campaign_contact id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.campaign_contact ALTER COLUMN id SET DEFAULT nextval('public.campaign_contact_id_seq'::regclass);
--
-- Name: canned_response id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.canned_response ALTER COLUMN id SET DEFAULT nextval('public.canned_response_id_seq'::regclass);
--
-- Name: interaction_step id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.interaction_step ALTER COLUMN id SET DEFAULT nextval('public.interaction_step_id_seq'::regclass);
--
-- Name: invite id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.invite ALTER COLUMN id SET DEFAULT nextval('public.invite_id_seq'::regclass);
--
-- Name: job_request id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.job_request ALTER COLUMN id SET DEFAULT nextval('public.job_request_id_seq'::regclass);
--
-- Name: log id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.log ALTER COLUMN id SET DEFAULT nextval('public.log_id_seq'::regclass);
--
-- Name: message id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.message ALTER COLUMN id SET DEFAULT nextval('public.message_id_seq'::regclass);
--
-- Name: migrations id; Type: DEFAULT; Schema: public; Owner: -
--
-- ALTER TABLE ONLY public.migrations ALTER COLUMN id SET DEFAULT nextval('public.migrations_id_seq'::regclass);
--
-- Name: opt_out id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.opt_out ALTER COLUMN id SET DEFAULT nextval('public.opt_out_id_seq'::regclass);
--
-- Name: organization id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.organization ALTER COLUMN id SET DEFAULT nextval('public.organization_id_seq'::regclass);
--
-- Name: pending_message_part id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.pending_message_part ALTER COLUMN id SET DEFAULT nextval('public.pending_message_part_id_seq'::regclass);
--
-- Name: question_response id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.question_response ALTER COLUMN id SET DEFAULT nextval('public.question_response_id_seq'::regclass);
--
-- Name: tag id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.tag ALTER COLUMN id SET DEFAULT nextval('public.tag_id_seq'::regclass);
--
-- Name: user id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY public."user" ALTER COLUMN id SET DEFAULT nextval('public.user_id_seq'::regclass);
--
-- Name: user_cell id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.user_cell ALTER COLUMN id SET DEFAULT nextval('public.user_cell_id_seq'::regclass);
--
-- Name: user_organization id; Type: DEFAULT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.user_organization ALTER COLUMN id SET DEFAULT nextval('public.user_organization_id_seq'::regclass);
--
-- Name: assignment assignment_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.assignment
ADD CONSTRAINT assignment_pkey PRIMARY KEY (id);
--
-- Name: campaign_contact campaign_contact_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.campaign_contact
ADD CONSTRAINT campaign_contact_pkey PRIMARY KEY (id);
--
-- Name: campaign campaign_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.campaign
ADD CONSTRAINT campaign_pkey PRIMARY KEY (id);
--
-- Name: canned_response canned_response_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.canned_response
ADD CONSTRAINT canned_response_pkey PRIMARY KEY (id);
--
-- Name: interaction_step interaction_step_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.interaction_step
ADD CONSTRAINT interaction_step_pkey PRIMARY KEY (id);
--
-- Name: invite invite_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.invite
ADD CONSTRAINT invite_pkey PRIMARY KEY (id);
--
-- Name: job_request job_request_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.job_request
ADD CONSTRAINT job_request_pkey PRIMARY KEY (id);
--
-- Name: log log_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.log
ADD CONSTRAINT log_pkey PRIMARY KEY (id);
--
-- Name: message message_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.message
ADD CONSTRAINT message_pkey PRIMARY KEY (id);
--
-- Name: migrations migrations_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
-- ALTER TABLE ONLY public.migrations
-- ADD CONSTRAINT migrations_pkey PRIMARY KEY (id);
--
-- Name: opt_out opt_out_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.opt_out
ADD CONSTRAINT opt_out_pkey PRIMARY KEY (id);
--
-- Name: organization organization_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.organization
ADD CONSTRAINT organization_pkey PRIMARY KEY (id);
--
-- Name: pending_message_part pending_message_part_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.pending_message_part
ADD CONSTRAINT pending_message_part_pkey PRIMARY KEY (id);
--
-- Name: question_response question_response_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.question_response
ADD CONSTRAINT question_response_pkey PRIMARY KEY (id);
--
-- Name: tag tag_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.tag
ADD CONSTRAINT tag_pkey PRIMARY KEY (id);
--
-- Name: user user_auth0_id_unique; Type: CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public."user"
ADD CONSTRAINT user_auth0_id_unique UNIQUE (auth0_id);
--
-- Name: user_cell user_cell_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.user_cell
ADD CONSTRAINT user_cell_pkey PRIMARY KEY (id);
--
-- Name: user_organization user_organization_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.user_organization
ADD CONSTRAINT user_organization_pkey PRIMARY KEY (id);
--
-- Name: user user_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public."user"
ADD CONSTRAINT user_pkey PRIMARY KEY (id);
--
-- Name: zip_code zip_code_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.zip_code
ADD CONSTRAINT zip_code_pkey PRIMARY KEY (zip);
--
-- Name: assignment_campaign_id_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX assignment_campaign_id_index ON public.assignment USING btree (campaign_id);
--
-- Name: assignment_user_id_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX assignment_user_id_index ON public.assignment USING btree (user_id);
--
-- Name: campaign_contact_assignment_id_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX campaign_contact_assignment_id_index ON public.campaign_contact USING btree (assignment_id);
--
-- Name: campaign_contact_assignment_id_timezone_offset_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX campaign_contact_assignment_id_timezone_offset_index ON public.campaign_contact USING btree (assignment_id, timezone_offset);
--
-- Name: campaign_contact_campaign_id_assignment_id_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX campaign_contact_campaign_id_assignment_id_index ON public.campaign_contact USING btree (campaign_id, assignment_id);
--
-- Name: campaign_contact_campaign_id_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX campaign_contact_campaign_id_index ON public.campaign_contact USING btree (campaign_id);
--
-- Name: campaign_contact_cell_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX campaign_contact_cell_index ON public.campaign_contact USING btree (cell);
--
-- Name: campaign_creator_id_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX campaign_creator_id_index ON public.campaign USING btree (creator_id);
--
-- Name: campaign_organization_id_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX campaign_organization_id_index ON public.campaign USING btree (organization_id);
--
-- Name: canned_response_campaign_id_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX canned_response_campaign_id_index ON public.canned_response USING btree (campaign_id);
--
-- Name: canned_response_user_id_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX canned_response_user_id_index ON public.canned_response USING btree (user_id);
--
-- Name: interaction_step_campaign_id_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX interaction_step_campaign_id_index ON public.interaction_step USING btree (campaign_id);
--
-- Name: interaction_step_parent_interaction_id_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX interaction_step_parent_interaction_id_index ON public.interaction_step USING btree (parent_interaction_id);
--
-- Name: invite_is_valid_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX invite_is_valid_index ON public.invite USING btree (is_valid);
--
-- Name: job_request_queue_name_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX job_request_queue_name_index ON public.job_request USING btree (queue_name);
--
-- Name: message_assignment_id_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX message_assignment_id_index ON public.message USING btree (assignment_id);
--
-- Name: message_contact_number_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX message_contact_number_index ON public.message USING btree (contact_number);
--
-- Name: message_messaging_service_sid_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX message_messaging_service_sid_index ON public.message USING btree (messaging_service_sid);
--
-- Name: message_send_status_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX message_send_status_index ON public.message USING btree (send_status);
--
-- Name: message_service_id_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX message_service_id_index ON public.message USING btree (service_id);
--
-- Name: message_user_id_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX message_user_id_index ON public.message USING btree (user_id);
--
-- Name: message_user_number_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX message_user_number_index ON public.message USING btree (user_number);
--
-- Name: opt_out_assignment_id_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX opt_out_assignment_id_index ON public.opt_out USING btree (assignment_id);
--
-- Name: opt_out_cell_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX opt_out_cell_index ON public.opt_out USING btree (cell);
--
-- Name: opt_out_organization_id_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX opt_out_organization_id_index ON public.opt_out USING btree (organization_id);
--
-- Name: pending_message_part_parent_id_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX pending_message_part_parent_id_index ON public.pending_message_part USING btree (parent_id);
--
-- Name: pending_message_part_service_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX pending_message_part_service_index ON public.pending_message_part USING btree (service);
--
-- Name: question_response_campaign_contact_id_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX question_response_campaign_contact_id_index ON public.question_response USING btree (campaign_contact_id);
--
-- Name: question_response_interaction_step_id_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX question_response_interaction_step_id_index ON public.question_response USING btree (interaction_step_id);
--
-- Name: tag_campaign_contact_id_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX tag_campaign_contact_id_index ON public.tag USING btree (campaign_contact_id);
--
-- Name: tag_tag_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX tag_tag_index ON public.tag USING btree (tag);
--
-- Name: user_auth0_id_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX user_auth0_id_index ON public."user" USING btree (auth0_id);
--
-- Name: user_cell_user_id_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX user_cell_user_id_index ON public.user_cell USING btree (user_id);
--
-- Name: user_email_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX user_email_index ON public."user" USING btree (email);
--
-- Name: user_organization_organization_id_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX user_organization_organization_id_index ON public.user_organization USING btree (organization_id);
--
-- Name: user_organization_organization_id_user_id_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX user_organization_organization_id_user_id_index ON public.user_organization USING btree (organization_id, user_id);
--
-- Name: user_organization_user_id_index; Type: INDEX; Schema: public; Owner: -
--
CREATE INDEX user_organization_user_id_index ON public.user_organization USING btree (user_id);
--
-- Name: assignment assignment_campaign_id_foreign; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.assignment
ADD CONSTRAINT assignment_campaign_id_foreign FOREIGN KEY (campaign_id) REFERENCES public.campaign(id);
--
-- Name: assignment assignment_user_id_foreign; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.assignment
ADD CONSTRAINT assignment_user_id_foreign FOREIGN KEY (user_id) REFERENCES public."user"(id);
--
-- Name: campaign_contact campaign_contact_assignment_id_foreign; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.campaign_contact
ADD CONSTRAINT campaign_contact_assignment_id_foreign FOREIGN KEY (assignment_id) REFERENCES public.assignment(id);
--
-- Name: campaign_contact campaign_contact_campaign_id_foreign; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.campaign_contact
ADD CONSTRAINT campaign_contact_campaign_id_foreign FOREIGN KEY (campaign_id) REFERENCES public.campaign(id);
--
-- Name: campaign campaign_creator_id_foreign; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.campaign
ADD CONSTRAINT campaign_creator_id_foreign FOREIGN KEY (creator_id) REFERENCES public."user"(id);
--
-- Name: campaign campaign_organization_id_foreign; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.campaign
ADD CONSTRAINT campaign_organization_id_foreign FOREIGN KEY (organization_id) REFERENCES public.organization(id);
--
-- Name: canned_response canned_response_campaign_id_foreign; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.canned_response
ADD CONSTRAINT canned_response_campaign_id_foreign FOREIGN KEY (campaign_id) REFERENCES public.campaign(id);
--
-- Name: canned_response canned_response_user_id_foreign; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.canned_response
ADD CONSTRAINT canned_response_user_id_foreign FOREIGN KEY (user_id) REFERENCES public."user"(id);
--
-- Name: interaction_step interaction_step_campaign_id_foreign; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.interaction_step
ADD CONSTRAINT interaction_step_campaign_id_foreign FOREIGN KEY (campaign_id) REFERENCES public.campaign(id);
--
-- Name: interaction_step interaction_step_parent_interaction_id_foreign; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.interaction_step
ADD CONSTRAINT interaction_step_parent_interaction_id_foreign FOREIGN KEY (parent_interaction_id) REFERENCES public.interaction_step(id);
--
-- Name: job_request job_request_campaign_id_foreign; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.job_request
ADD CONSTRAINT job_request_campaign_id_foreign FOREIGN KEY (campaign_id) REFERENCES public.campaign(id);
--
-- Name: message message_assignment_id_foreign; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.message
ADD CONSTRAINT message_assignment_id_foreign FOREIGN KEY (assignment_id) REFERENCES public.assignment(id);
--
-- Name: message message_canned_response_id_foreign; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.message
ADD CONSTRAINT message_canned_response_id_foreign FOREIGN KEY (canned_response_id) REFERENCES public.canned_response(id);
--
-- Name: message message_user_id_foreign; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.message
ADD CONSTRAINT message_user_id_foreign FOREIGN KEY (user_id) REFERENCES public."user"(id);
--
-- Name: opt_out opt_out_assignment_id_foreign; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.opt_out
ADD CONSTRAINT opt_out_assignment_id_foreign FOREIGN KEY (assignment_id) REFERENCES public.assignment(id);
--
-- Name: opt_out opt_out_organization_id_foreign; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.opt_out
ADD CONSTRAINT opt_out_organization_id_foreign FOREIGN KEY (organization_id) REFERENCES public.organization(id);
--
-- Name: question_response question_response_campaign_contact_id_foreign; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.question_response
ADD CONSTRAINT question_response_campaign_contact_id_foreign FOREIGN KEY (campaign_contact_id) REFERENCES public.campaign_contact(id);
--
-- Name: question_response question_response_interaction_step_id_foreign; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.question_response
ADD CONSTRAINT question_response_interaction_step_id_foreign FOREIGN KEY (interaction_step_id) REFERENCES public.interaction_step(id);
--
-- Name: tag tag_campaign_contact_id_foreign; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.tag
ADD CONSTRAINT tag_campaign_contact_id_foreign FOREIGN KEY (campaign_contact_id) REFERENCES public.campaign_contact(id);
--
-- Name: tag tag_created_by_foreign; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.tag
ADD CONSTRAINT tag_created_by_foreign FOREIGN KEY (created_by) REFERENCES public."user"(id);
--
-- Name: tag tag_resolved_by_foreign; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.tag
ADD CONSTRAINT tag_resolved_by_foreign FOREIGN KEY (resolved_by) REFERENCES public."user"(id);
--
-- Name: user_cell user_cell_user_id_foreign; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.user_cell
ADD CONSTRAINT user_cell_user_id_foreign FOREIGN KEY (user_id) REFERENCES public."user"(id);
--
-- Name: user_organization user_organization_organization_id_foreign; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.user_organization
ADD CONSTRAINT user_organization_organization_id_foreign FOREIGN KEY (organization_id) REFERENCES public.organization(id);
--
-- Name: user_organization user_organization_user_id_foreign; Type: FK CONSTRAINT; Schema: public; Owner: -
--
ALTER TABLE ONLY public.user_organization
ADD CONSTRAINT user_organization_user_id_foreign FOREIGN KEY (user_id) REFERENCES public."user"(id); | the_stack |
analyze r_existing;
analyze ex;
analyze concept_relationship_stage;
drop index if exists irs_both;
create index irs_both on internal_relationship_stage (concept_code_1,concept_code_2);
analyze internal_relationship_stage;
DROP TABLE IF EXISTS map_drug;
CREATE TABLE map_drug AS
SELECT from_code,
to_id,
'00'::VARCHAR AS map_order
FROM maps_to
where to_id>0;
INSERT INTO map_drug (
from_code,
to_id,
map_order
)
SELECT DISTINCT cr.concept_code_1,
first_value(r.concept_id) OVER (
PARTITION BY cr.concept_code_1 ORDER BY rc2.precedence
),
'1' -- Map Marketed Form to corresponding Branded/Clinical Drug (save box size and quant factor)
FROM r_existing r
JOIN ex e ON r.quant_value = e.r_value
AND r.quant_unit_id = e.quant_unit_id
AND r.i_combo = e.ri_combo
AND r.d_combo = e.rd_combo
AND r.bn_id = e.bn_id
AND r.bs = e.bs
AND r.mf_id = 0 AND e.mf_id!=0
AND e.concept_id<0
JOIN relationship_to_concept rc ON rc.concept_id_2 = e.df_id
JOIN relationship_to_concept rc2 ON rc.concept_code_1 = rc2.concept_code_1
AND rc2.concept_id_2 = r.df_id
JOIN concept_relationship_stage cr ON cr.concept_code_2 = e.concept_code
AND cr.relationship_id = 'Maps to'
AND cr.vocabulary_id_1 = (
SELECT vocabulary_id
FROM drug_concept_stage limit 1
)
AND cr.vocabulary_id_2 = 'RxNorm Extension'
JOIN internal_relationship_stage i on
rc.concept_code_1 = i.concept_code_2 and
cr.concept_code_1 = i.concept_code_1
WHERE NOT exists (
SELECT 1
FROM map_drug
where cr.concept_code_1 = from_code
);
INSERT INTO map_drug (
from_code,
to_id,
map_order
)
SELECT DISTINCT cr.concept_code_1,
first_value(r.concept_id) OVER (
PARTITION BY cr.concept_code_1 ORDER BY rc2.precedence
),
'2' -- Kick box size out
FROM r_existing r
JOIN ex e ON r.quant_value = e.r_value
AND r.quant_unit_id = e.quant_unit_id
AND r.i_combo = e.ri_combo
AND r.d_combo = e.rd_combo
AND r.bn_id = e.bn_id
AND r.bs = 0
AND r.mf_id = 0
AND e.concept_id<0
JOIN relationship_to_concept rc ON rc.concept_id_2 = e.df_id
JOIN relationship_to_concept rc2 ON rc.concept_code_1 = rc2.concept_code_1
AND rc2.concept_id_2 = r.df_id
JOIN concept_relationship_stage cr ON cr.concept_code_2 = e.concept_code
AND cr.relationship_id = 'Maps to'
AND cr.vocabulary_id_1 = (
SELECT vocabulary_id
FROM drug_concept_stage limit 1
)
AND cr.vocabulary_id_2 = 'RxNorm Extension'
JOIN internal_relationship_stage i on
rc.concept_code_1 = i.concept_code_2 and
cr.concept_code_1 = i.concept_code_1
WHERE NOT exists (
SELECT 1
FROM map_drug
where cr.concept_code_1 = from_code
);
INSERT INTO map_drug (
from_code,
to_id,
map_order
)
SELECT DISTINCT cr.concept_code_1,
first_value(r.concept_id) OVER (
PARTITION BY cr.concept_code_1 ORDER BY rc2.precedence
),
'3' -- Kick Quant factor out
FROM r_existing r
JOIN ex e ON r.i_combo = e.ri_combo
AND r.d_combo = e.rd_combo
AND r.bn_id = e.bn_id
AND r.quant_value = 0
AND r.quant_unit_id = 0
AND r.bs = 0
AND r.mf_id = 0
AND e.concept_id<0
JOIN relationship_to_concept rc ON rc.concept_id_2 = e.df_id
JOIN relationship_to_concept rc2 ON rc.concept_code_1 = rc2.concept_code_1
AND rc2.concept_id_2 = r.df_id
JOIN concept_relationship_stage cr ON cr.concept_code_2 = e.concept_code
AND cr.relationship_id = 'Maps to'
AND cr.vocabulary_id_1 = (
SELECT vocabulary_id
FROM drug_concept_stage limit 1
)
AND cr.vocabulary_id_2 = 'RxNorm Extension'
JOIN internal_relationship_stage i on
rc.concept_code_1 = i.concept_code_2 and
cr.concept_code_1 = i.concept_code_1
WHERE NOT exists (
SELECT 1
FROM map_drug
where cr.concept_code_1 = from_code
);
INSERT INTO map_drug (
from_code,
to_id,
map_order
)
SELECT DISTINCT cr.concept_code_1,
first_value(r.concept_id) OVER (
PARTITION BY cr.concept_code_1 ORDER BY rc2.precedence
),
'3' -- Kick BN out, save Quant factor
FROM r_existing r
JOIN ex e ON r.i_combo = e.ri_combo
AND r.d_combo = e.rd_combo
AND r.quant_value = e.r_value
AND r.quant_unit_id = e.quant_unit_id
AND r.bs = 0
AND r.mf_id = 0
AND r.bn_id = 0
AND e.concept_id<0
JOIN relationship_to_concept rc ON rc.concept_id_2 = e.df_id
JOIN relationship_to_concept rc2 ON rc.concept_code_1 = rc2.concept_code_1
AND rc2.concept_id_2 = r.df_id
JOIN concept_relationship_stage cr ON cr.concept_code_2 = e.concept_code
AND cr.relationship_id = 'Maps to'
AND cr.vocabulary_id_1 = (
SELECT vocabulary_id
FROM drug_concept_stage limit 1
)
AND cr.vocabulary_id_2 = 'RxNorm Extension'
JOIN internal_relationship_stage i on
rc.concept_code_1 = i.concept_code_2 and
cr.concept_code_1 = i.concept_code_1
WHERE NOT exists (
SELECT 1
FROM map_drug
where cr.concept_code_1 = from_code
);
INSERT INTO map_drug (
from_code,
to_id,
map_order
)
SELECT DISTINCT cr.concept_code_1,
first_value(r.concept_id) OVER (
PARTITION BY cr.concept_code_1 ORDER BY rc2.precedence
),
'4' -- Map Branded Drug to corresponding Clinical Drug (save box size)
FROM r_existing r
JOIN ex e ON r.i_combo = e.ri_combo
AND r.d_combo = e.rd_combo
AND r.bs = e.bs
AND r.bn_id = 0
AND r.quant_value = 0
AND r.quant_unit_id = 0
AND r.mf_id = 0
AND e.concept_id<0
JOIN relationship_to_concept rc ON rc.concept_id_2 = e.df_id
JOIN relationship_to_concept rc2 ON rc.concept_code_1 = rc2.concept_code_1
AND rc2.concept_id_2 = r.df_id
JOIN concept_relationship_stage cr ON cr.concept_code_2 = e.concept_code
AND cr.relationship_id = 'Maps to'
AND cr.vocabulary_id_1 = (
SELECT vocabulary_id
FROM drug_concept_stage limit 1
)
AND cr.vocabulary_id_2 = 'RxNorm Extension'
JOIN internal_relationship_stage i on
rc.concept_code_1 = i.concept_code_2 and
cr.concept_code_1 = i.concept_code_1
WHERE NOT exists (
SELECT 1
FROM map_drug
where cr.concept_code_1 = from_code
);
INSERT INTO map_drug (
from_code,
to_id,
map_order
)
SELECT DISTINCT cr.concept_code_1,
first_value(r.concept_id) OVER (
PARTITION BY cr.concept_code_1 ORDER BY rc2.precedence
),
'5' -- Map Branded Drug to corresponding Clinical Drug
FROM r_existing r
JOIN ex e ON r.i_combo = e.ri_combo
AND r.d_combo = e.rd_combo
AND r.bn_id = 0
AND r.quant_value = 0
AND r.quant_unit_id = 0
AND r.bs = 0
AND r.mf_id = 0
AND e.concept_id<0
JOIN relationship_to_concept rc ON rc.concept_id_2 = e.df_id
JOIN relationship_to_concept rc2 ON rc.concept_code_1 = rc2.concept_code_1
AND rc2.concept_id_2 = r.df_id
JOIN concept_relationship_stage cr ON cr.concept_code_2 = e.concept_code
AND cr.relationship_id = 'Maps to'
AND cr.vocabulary_id_1 = (
SELECT vocabulary_id
FROM drug_concept_stage LIMIT 1
)
AND cr.vocabulary_id_2 = 'RxNorm Extension'
JOIN internal_relationship_stage i on
rc.concept_code_1 = i.concept_code_2 and
cr.concept_code_1 = i.concept_code_1
WHERE NOT exists (
SELECT 1
FROM map_drug
where cr.concept_code_1 = from_code
);
INSERT INTO map_drug (
from_code,
to_id,
map_order
)
SELECT DISTINCT cr.concept_code_1,
first_value(r.concept_id) OVER (
PARTITION BY cr.concept_code_1 ORDER BY rc2.precedence
),
'6' -- Branded Drug Form
FROM r_existing r
JOIN ex e ON r.i_combo = e.ri_combo
AND r.bn_id = e.bn_id
AND trim(r.d_combo) ='' -- was ' ' in r_existing.d_combo
AND r.quant_value = 0
AND r.quant_unit_id = 0
AND r.bs = 0
AND r.mf_id = 0
AND e.concept_id<0
JOIN relationship_to_concept rc ON rc.concept_id_2 = e.df_id
JOIN relationship_to_concept rc2 ON rc.concept_code_1 = rc2.concept_code_1
AND rc2.concept_id_2 = r.df_id
JOIN concept_relationship_stage cr ON cr.concept_code_2 = e.concept_code
AND cr.relationship_id = 'Maps to'
AND cr.vocabulary_id_1 = (
SELECT vocabulary_id
FROM drug_concept_stage LIMIT 1
)
AND cr.vocabulary_id_2 = 'RxNorm Extension'
JOIN internal_relationship_stage i on
rc.concept_code_1 = i.concept_code_2 and
cr.concept_code_1 = i.concept_code_1
WHERE NOT exists (
SELECT 1
FROM map_drug
where cr.concept_code_1 = from_code
);
INSERT INTO map_drug (
from_code,
to_id,
map_order
)
SELECT DISTINCT cr.concept_code_1,
r.concept_id,
'7' -- Branded Drug Comp
FROM r_existing r
JOIN ex e ON r.i_combo = e.ri_combo
AND r.d_combo = e.rd_combo
AND r.bn_id = e.bn_id
AND r.df_id = 0
AND r.quant_value = 0
AND r.quant_unit_id = 0
AND r.bs = 0
AND r.mf_id = 0
--and e.concept_id like '-%'
JOIN concept_relationship_stage cr ON cr.concept_code_2 = e.concept_code
AND cr.relationship_id = 'Maps to'
AND cr.vocabulary_id_1 = (
SELECT vocabulary_id
FROM drug_concept_stage LIMIT 1
)
AND cr.vocabulary_id_2 = 'RxNorm Extension'
WHERE NOT exists (
SELECT 1
FROM map_drug
where cr.concept_code_1 = from_code
);
INSERT INTO map_drug (
from_code,
to_id,
map_order
)
SELECT DISTINCT cr.concept_code_1,
first_value(r.concept_id) OVER (
PARTITION BY cr.concept_code_1 ORDER BY rc2.precedence
),
'8' -- Clinical Drug Form
FROM r_existing r
JOIN ex e ON r.i_combo = e.ri_combo
AND trim(r.d_combo) =''
AND r.bn_id = 0
AND r.quant_value = 0
AND r.quant_unit_id = 0
AND r.bs = 0
AND r.mf_id = 0
AND e.concept_id<0
JOIN relationship_to_concept rc ON rc.concept_id_2 = e.df_id
JOIN relationship_to_concept rc2 ON rc.concept_code_1 = rc2.concept_code_1
AND rc2.concept_id_2 = r.df_id
JOIN concept_relationship_stage cr ON cr.concept_code_2 = e.concept_code
AND cr.relationship_id = 'Maps to'
AND cr.vocabulary_id_1 = (
SELECT vocabulary_id
FROM drug_concept_stage LIMIT 1
)
AND cr.vocabulary_id_2 = 'RxNorm Extension'
JOIN internal_relationship_stage i on
rc.concept_code_1 = i.concept_code_2 and
cr.concept_code_1 = i.concept_code_1
WHERE NOT exists (
SELECT 1
FROM map_drug
where cr.concept_code_1 = from_code
);
INSERT INTO map_drug (
from_code,
to_id,
map_order
)
WITH e AS (
SELECT e.concept_id,
e.concept_code,
u.rd_combo,
u.ri_combo,
coalesce(length(e.ri_combo) - length(replace(e.ri_combo, '-', '')), 0) cnt
FROM ex e,
unnest((
SELECT regexp_split_to_array(e.rd_combo, '-')
), (
SELECT regexp_split_to_array(e.ri_combo, '-')
)) AS u(rd_combo, ri_combo)
),
r AS (
SELECT count(r.concept_id) OVER (PARTITION BY e.concept_id) AS cnt_2,
e.concept_id,
r.concept_id AS r_concept_id
FROM e
JOIN r_existing r ON r.i_combo = e.ri_combo
AND r.d_combo = e.rd_combo
AND concept_class_id = 'Clinical Drug Comp'
)
SELECT DISTINCT cr.concept_code_1,
r_concept_id,
'9' -- Clinical Drug Comp
FROM r
JOIN e using (concept_id)
JOIN concept_relationship_stage cr ON cr.concept_code_2 = e.concept_code
AND cr.relationship_id = 'Maps to'
AND cr.vocabulary_id_1 = (
SELECT vocabulary_id
FROM drug_concept_stage LIMIT 1
)
AND cr.vocabulary_id_2 = 'RxNorm Extension'
WHERE NOT exists (
SELECT 1
FROM map_drug
where cr.concept_code_1 = from_code
)
AND cnt_2 = cnt + 1;-- take only those where components counts are equal
INSERT INTO map_drug (
from_code,
to_id,
map_order
)
SELECT DISTINCT i.concept_code_1,
c.concept_id,
'10' -- Drug to ingredient
FROM internal_relationship_stage i
JOIN drug_concept_stage ON i.concept_code_2 = concept_code
AND concept_class_id = 'Ingredient'
JOIN concept_relationship_stage cr ON cr.concept_code_1 = concept_code
AND relationship_id = 'Maps to'
JOIN concept c ON c.concept_code = cr.concept_code_2
AND c.vocabulary_id LIKE 'Rx%'
WHERE NOT exists (
SELECT 1
FROM map_drug
where i.concept_code_1 = from_code
);
INSERT INTO map_drug (
from_code,
to_id,
map_order
)
SELECT DISTINCT i.concept_code_2,
c.concept_id,
'11' -- add the set of source attributes
FROM internal_relationship_stage i
JOIN drug_concept_stage ON i.concept_code_2 = concept_code
AND concept_class_id IN (
'Ingredient',
'Brand Name',
'Suppier',
'Dose Form'
)
JOIN concept_relationship_stage cr ON cr.concept_code_1 = concept_code
AND relationship_id in ('Maps to','Source - RxNorm eq')
JOIN concept c ON c.concept_code = cr.concept_code_2
AND c.vocabulary_id in ('RxNorm','RxNorm Extension')
WHERE NOT exists (
SELECT 1
FROM map_drug
where i.concept_code_2 = from_code
);
--Proceed packs
INSERT INTO map_drug (
from_code,
to_id,
map_order
) -- existing mapping
SELECT DISTINCT pack_concept_code,
pack_concept_id,
'12'
FROM q_existing_pack q
JOIN r_existing_pack using (
components,
cnt,
bn_id,
bs,
mf_id
);
INSERT INTO map_drug (
from_code,
to_id,
map_order
) -- Map Packs to corresponding Rx Packs without a supplier
SELECT DISTINCT pack_concept_code,
pack_concept_id,
'13'
FROM q_existing_pack q
JOIN r_existing_pack using (
components,
cnt,
bn_id,
bs
)
WHERE NOT exists (
SELECT 1
FROM map_drug
where pack_concept_code = from_code
);
INSERT INTO map_drug (
from_code,
to_id,
map_order
) -- Map Packs to corresponding Rx Packs without a supplier and box_size
SELECT DISTINCT pack_concept_code,
pack_concept_id,
'14'
FROM q_existing_pack q
JOIN r_existing_pack using (
components,
cnt,
bn_id
)
WHERE NOT exists (
SELECT 1
FROM map_drug
where pack_concept_code = from_code
);
INSERT INTO map_drug (
from_code,
to_id,
map_order
) -- Map Packs to corresponding Rx Packs without a supplier, box size and brand name
SELECT DISTINCT pack_concept_code,
pack_concept_id,
'15'
FROM q_existing_pack q
JOIN r_existing_pack using (
components,
cnt
)
WHERE NOT exists (
SELECT 1
FROM map_drug
where pack_concept_code = from_code
);
DELETE
FROM map_drug
WHERE from_code LIKE 'OMOP%';--delete newly created concepts not to overload concept table
--delete all unnecessary concepts
TRUNCATE TABLE concept_relationship_stage;
TRUNCATE TABLE pack_content_stage;
TRUNCATE TABLE drug_strength_stage;
-- Stage the relationships from source drug codes to their mapped concepts.
-- Attribute-class concepts (Brand Name, Supplier, Dose Form) are linked with
-- 'Source - RxNorm eq'; every other class gets 'Maps to'. Device concepts
-- are additionally mapped to themselves.
INSERT INTO concept_relationship_stage (
	concept_code_1,
	concept_code_2,
	vocabulary_id_1,
	vocabulary_id_2,
	relationship_id,
	valid_start_date,
	valid_end_date
	)
SELECT from_code,
	c.concept_code,
	dc.vocabulary_id,
	c.vocabulary_id,
	CASE
		WHEN dc.concept_class_id IN (
				'Brand Name',
				-- BUG FIX: was misspelled 'Suppier', so Supplier concepts
				-- silently received 'Maps to' instead of 'Source - RxNorm eq'.
				'Supplier',
				'Dose Form'
				)
			THEN 'Source - RxNorm eq'
		ELSE 'Maps to'
		END,
	CURRENT_DATE,
	to_date('20991231', 'yyyymmdd')
FROM map_drug m
JOIN drug_concept_stage dc ON dc.concept_code = m.from_code
JOIN concept c ON to_id = c.concept_id

UNION

SELECT concept_code,
	concept_code,
	vocabulary_id,
	vocabulary_id,
	'Maps to',
	CURRENT_DATE,
	to_date('20991231', 'yyyymmdd')
FROM drug_concept_stage
WHERE domain_id = 'Device';
-- Drop generated OMOP% codes from the stage; devices and unmapped drugs remain.
DELETE
FROM concept_stage
WHERE concept_code LIKE 'OMOP%';--save devices and unmapped drug
-- Demote to non-standard every staged concept marked 'S' whose relationships
-- do not resolve to any concept in the existing concept table (both left
-- joins find no match, so c.concept_id comes back NULL).
UPDATE concept_stage
SET standard_concept = NULL
WHERE concept_code IN (
SELECT a.concept_code
FROM concept_stage a
LEFT JOIN concept_relationship_stage ON concept_code_1 = a.concept_code
AND vocabulary_id_1 = a.vocabulary_id
LEFT JOIN concept c ON c.concept_code = concept_code_2
AND c.vocabulary_id = vocabulary_id_2
WHERE a.standard_concept = 'S'
AND c.concept_id IS NULL
);
drop index if exists irs_both;
SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;
-- NOTE(review): the session charset is utf8mb4 but every table below is
-- created with CHARSET=utf8 (3-byte); 4-byte characters such as emoji in
-- user-supplied text would be rejected or mangled -- confirm intent.
-- ----------------------------
-- Table structure for core_agency
-- ----------------------------
DROP TABLE IF EXISTS `core_agency`;
-- Agency commission configuration; percentage columns store value * 100.
CREATE TABLE `core_agency` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`recharge_by_app` int(11) NOT NULL DEFAULT '0' COMMENT '充值任务币时平台给出的返现提成,百分比,乘以100',
`withdraw_by_user` int(11) NOT NULL DEFAULT '0' COMMENT '提现收入时用户从自己的钱拿出来给上级返现的比例',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for core_banner
-- ----------------------------
DROP TABLE IF EXISTS `core_banner`;
-- Home-page banners: image URL plus click-through target (URL or raw HTML).
CREATE TABLE `core_banner` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`img` varchar(255) DEFAULT NULL COMMENT 'banner图片地址',
`html` mediumtext COMMENT '点击后跳转连接,可以是html代码,也可以是网址',
`sort` int(11) DEFAULT NULL COMMENT '排序',
`usable` tinyint(1) DEFAULT NULL COMMENT '是否可用',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=11 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for core_currency_change
-- ----------------------------
DROP TABLE IF EXISTS `core_currency_change`;
-- Ledger of per-user currency balance changes (signed amount, currency type,
-- change reason). Type/reason reference sys_property rows.
CREATE TABLE `core_currency_change` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT COMMENT '用户自增id',
`record` int(11) DEFAULT NULL COMMENT '货币变动值,可以正数也可以负数',
`type_property_id` int(11) DEFAULT NULL COMMENT '货币的类型',
`reason_property_id` int(11) DEFAULT NULL COMMENT '货币变化的原因',
`user_id` int(11) DEFAULT NULL COMMENT '用户id',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE
-- BUG FIX: table comment was COMMENT='角色表' ("role table"), copy-pasted
-- from sys_role; corrected to describe this currency-change ledger.
) ENGINE=InnoDB AUTO_INCREMENT=948 DEFAULT CHARSET=utf8 COMMENT='货币变动记录表';
-- ----------------------------
-- Table structure for core_deposit_purchase
-- ----------------------------
DROP TABLE IF EXISTS `core_deposit_purchase`;
-- Security-deposit purchases and refunds; amounts are in fen (cents).
CREATE TABLE `core_deposit_purchase` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`user_id` int(11) DEFAULT NULL COMMENT '用户id',
`deposit` int(11) DEFAULT NULL COMMENT '保证金金额,单位分',
`refund_time` timestamp NULL DEFAULT NULL COMMENT '申退保证金日期',
`refund_finish_time` timestamp NULL DEFAULT NULL COMMENT '申退保证金完成',
`usable` tinyint(1) NOT NULL COMMENT '当前保证金是否可用',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for core_img
-- ----------------------------
DROP TABLE IF EXISTS `core_img`;
-- Uploaded images; data_id points into different tables depending on context
-- (mission detail, mission acceptance, advice), disambiguated by `type`.
CREATE TABLE `core_img` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`data_id` int(11) DEFAULT NULL COMMENT '具体的ID,比如发布任务是core_mission_detail,提交任务是core_mission_accept的id,意见反馈是sys_advice的id',
`user_id` int(11) NOT NULL COMMENT '任务发布者的ID,或者任务接取者的ID',
`path` varchar(255) NOT NULL COMMENT '图片的本地路径',
`type` int(11) DEFAULT NULL COMMENT '此字段主要是为了区分同一个id却有多个需要区分的图片,比如创建任务,需要上传审核图片和操作图片',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE,
KEY `data_id` (`data_id`,`user_id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=1315 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for core_login_auth
-- ----------------------------
DROP TABLE IF EXISTS `core_login_auth`;
-- Login credentials per user and auth type (password or third-party OpenID).
CREATE TABLE `core_login_auth` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`user_id` int(11) NOT NULL COMMENT '用户表,用户ID',
`account` varchar(100) DEFAULT NULL COMMENT '登陆账号,如果是三方登陆的话,应该是三方的id。比如微信的是openid',
`password` varchar(100) DEFAULT NULL COMMENT '登陆的密码,存储的是加密后的密码。如果是三方登陆的话,有可能是token,',
`type_property_id` int(11) DEFAULT NULL COMMENT '登陆类型,属性表的id',
`data` varchar(1500) DEFAULT NULL COMMENT '某些三方登录返回的json',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE,
UNIQUE KEY `account` (`account`,`type_property_id`) USING BTREE,
KEY `user_id` (`user_id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=136 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for core_mission
-- ----------------------------
DROP TABLE IF EXISTS `core_mission`;
-- Published missions: lifecycle status, pin-to-top deadline, rejection reason.
CREATE TABLE `core_mission` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`mission_detail_id` int(10) unsigned NOT NULL,
`user_id` int(11) NOT NULL COMMENT '任务发布者的id',
`publish_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '任务发布时间',
`status_property_id` int(11) DEFAULT NULL COMMENT '任务当前状态(被封、数量完成、到期等各种情况)\r\n29被封\r\n32发布\r\n57审核驳回\r\n28待审核\r\n30任务截止\r\n31任务全部完成\r\n',
`top_end_time` timestamp NULL DEFAULT NULL COMMENT '任务置顶结束时间',
`reason` varchar(255) DEFAULT NULL COMMENT '任务审核驳回或者被封,保存原因',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE,
KEY `fk_core_mission_core_mission_detail` (`mission_detail_id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=1000402 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for core_mission_accept
-- ----------------------------
DROP TABLE IF EXISTS `core_mission_accept`;
-- One row per worker accepting a mission; tracks submit/review timestamps.
CREATE TABLE `core_mission_accept` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`mission_id` int(11) NOT NULL COMMENT '接取的任务ID',
`accept_user_id` int(11) DEFAULT NULL COMMENT '接单者ID',
`publish_user_id` int(11) DEFAULT NULL COMMENT '发单者ID',
`accept_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP COMMENT '接受任务时间',
`finish_time` timestamp NULL DEFAULT NULL COMMENT '完成任务时间,或者任务不合格时,12小时内用户没有再次上传,12小时后就会填上完成时间,或者已超时(接取20分钟后),或者截止和被封',
`upload_time` timestamp NULL DEFAULT NULL COMMENT '提交任务时间,每次提交都会更新',
`review_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '商家审核时间,每次审核都会更新',
`proceed_property_id` int(11) DEFAULT NULL COMMENT '任务执行情况',
`text_verify` varchar(255) DEFAULT NULL COMMENT '提交任务时的文字验证',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE,
KEY `user_id` (`accept_user_id`) USING BTREE,
KEY `core_mission_accept_proceed_property_id` (`proceed_property_id`) USING BTREE,
KEY `core_mission_accept_mission_id` (`mission_id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=51192 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for core_mission_complaint
-- ----------------------------
DROP TABLE IF EXISTS `core_mission_complaint`;
-- Complaints against a mission acceptance; at most one per acceptance
-- (enforced by the unique key on mission_accept_id).
CREATE TABLE `core_mission_complaint` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`mission_accept_id` int(11) unsigned NOT NULL,
`complainter_user_id` int(11) NOT NULL COMMENT '任务申诉者的id',
`text` varchar(500) NOT NULL COMMENT '申诉内容',
`result` tinyint(1) DEFAULT NULL COMMENT '投诉是否成功,1代表申诉成功',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE,
UNIQUE KEY `uni_core_mission_complaint_complainter_mission_accept_id` (`mission_accept_id`) USING BTREE,
KEY `idx_core_mission_complaint_complainter_user_id` (`complainter_user_id`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for core_mission_detail
-- ----------------------------
DROP TABLE IF EXISTS `core_mission_detail`;
-- Mission definition: type, price (fen), counts, fees and verification rules.
CREATE TABLE `core_mission_detail` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`type_property_id` int(11) NOT NULL COMMENT '发布任务的类型,在属性表中的id',
`user_id` int(11) NOT NULL COMMENT '发布人id',
`mobile_property_id` int(11) NOT NULL COMMENT '属性表里面的支持设备类型',
`title` varchar(12) NOT NULL COMMENT '标题,12字以内',
`deadline_time` timestamp NOT NULL COMMENT '截止时间',
`price` int(11) NOT NULL COMMENT '出价,单位分',
`count` int(11) NOT NULL COMMENT '剩余未完成的任务数量',
`publish_count` int(11) NOT NULL COMMENT '任务发布时的数量,发布后不会改变',
`fee_price` int(11) NOT NULL COMMENT '服务费金额,单位为分',
`fee_percent` int(11) NOT NULL COMMENT '手续费的百分比,乘以100之后的数值',
`mission_rule_id` int(11) NOT NULL COMMENT '任务规则的id',
`url` varchar(500) DEFAULT NULL COMMENT '链接',
`text_verify` varchar(500) DEFAULT NULL COMMENT '文字验证',
`remark` varchar(255) DEFAULT NULL COMMENT '备注',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE,
KEY `user_id` (`user_id`) USING BTREE,
KEY `type_property_id` (`type_property_id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=400 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for core_mission_detail_step
-- ----------------------------
DROP TABLE IF EXISTS `core_mission_detail_step`;
-- Ordered instruction steps of a mission (text plus optional image flag).
-- NOTE(review): column `haveImg` is camelCase, inconsistent with the
-- snake_case used everywhere else (cf. sys_advice.have_img).
CREATE TABLE `core_mission_detail_step` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`mission_detail_id` int(11) DEFAULT NULL,
`haveImg` tinyint(1) DEFAULT NULL COMMENT '是否包含图片',
`text` varchar(255) DEFAULT NULL COMMENT '文字',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE,
KEY `idx_core_mission_detail_step_mission_detail_id` (`mission_detail_id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=632 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for core_mission_rule
-- ----------------------------
DROP TABLE IF EXISTS `core_mission_rule`;
-- Per-mission-type publishing constraints (min price/count, image limits).
CREATE TABLE `core_mission_rule` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`type_property_id` int(11) DEFAULT NULL COMMENT '任务类型的属性表ID',
`min_price` int(11) DEFAULT NULL COMMENT '最低出价,单位为分',
`min_count` int(11) DEFAULT NULL COMMENT '最低发布的任务数量',
`verify_img_count` tinyint(4) DEFAULT '0' COMMENT '审核验证图最多上传数量',
`usable` tinyint(1) DEFAULT NULL COMMENT '是否可用。如果不可用,则任务列表不显示。已接取的还可以完成',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE,
UNIQUE KEY `uni_core_mission_rule_type_property_id` (`type_property_id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=17 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for core_notice
-- ----------------------------
DROP TABLE IF EXISTS `core_notice`;
-- Site-wide announcements with click-through content (URL or raw HTML).
CREATE TABLE `core_notice` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`text` varchar(255) DEFAULT NULL COMMENT '文字内容',
`html` mediumtext COMMENT '点击后跳转连接,可以是html代码,也可以是网址',
`sort` int(11) DEFAULT NULL COMMENT '排序',
`usable` tinyint(1) DEFAULT NULL COMMENT '是否可用',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=6 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for core_partner
-- ----------------------------
DROP TABLE IF EXISTS `core_partner`;
-- Partner (membership) tiers: fee percentages (* 100), card prices in fen.
CREATE TABLE `core_partner` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`name` varchar(10) NOT NULL COMMENT '合作商的级别名字',
`level` tinyint(4) DEFAULT NULL COMMENT '合作商级别,0是非合作商,级别越高数越大',
`fee_percent` int(11) NOT NULL COMMENT '交易手续费的百分比,乘以100之后的数值',
`mission_payment_percent` int(11) NOT NULL COMMENT '任务币的提现手续费,乘以100之后的数值',
`min_fee_price` int(11) NOT NULL COMMENT '发布任务最低的服务费金额,单位分',
`mouth_price` int(11) NOT NULL DEFAULT '0' COMMENT '月卡价格,单位分/月',
`year_price` int(11) NOT NULL DEFAULT '0' COMMENT '年卡价格,单位分/年',
`ad_hour` int(11) NOT NULL DEFAULT '0' COMMENT '免费推荐的时间,单位小时/日',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE,
UNIQUE KEY `uni_core_partner_level` (`level`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=5 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for core_partner_purchase
-- ----------------------------
DROP TABLE IF EXISTS `core_partner_purchase`;
-- Purchases of partner tiers; monthly (time_type=1) or yearly (time_type=2).
CREATE TABLE `core_partner_purchase` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`user_id` int(11) DEFAULT NULL COMMENT '用户id',
`partner_id` int(11) DEFAULT NULL COMMENT '合作商id',
`price` int(11) DEFAULT NULL COMMENT '花费的钱,单位分',
`start_time` date DEFAULT NULL COMMENT '合作商开始日期,购买立即开始算时间',
`end_time` date DEFAULT NULL COMMENT '合作商结束日期,这个日期的零时过期',
`time_type` tinyint(4) DEFAULT NULL COMMENT '1为月卡,2位年卡',
`usable` tinyint(1) NOT NULL COMMENT '当前合作商是否可用',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for core_qrcode
-- ----------------------------
DROP TABLE IF EXISTS `core_qrcode`;
-- Per-user QR codes identified by a unique random UUID.
CREATE TABLE `core_qrcode` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`user_id` int(11) unsigned NOT NULL,
`uuid` varchar(32) NOT NULL COMMENT '生成唯一二维码用的随机UUID',
`expire_time` timestamp NULL DEFAULT NULL COMMENT '到期时间,暂时没有做什么判断',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE,
UNIQUE KEY `uni_core_qrcode_uuid` (`uuid`) USING BTREE,
KEY `fk_core_qrcode_sys_user` (`user_id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=35 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for core_question
-- ----------------------------
DROP TABLE IF EXISTS `core_question`;
-- FAQ entries: title plus body text.
CREATE TABLE `core_question` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`title` varchar(255) NOT NULL COMMENT '标题',
`msg` varchar(5000) NOT NULL COMMENT '内容',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for core_top
-- ----------------------------
DROP TABLE IF EXISTS `core_top`;
-- Mission pin-to-top purchases: free vs paid hours and resulting end time.
CREATE TABLE `core_top` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`user_id` int(11) DEFAULT NULL COMMENT '用户id',
`mission_id` int(11) DEFAULT NULL COMMENT '任务ID',
`free_hours` int(11) DEFAULT NULL COMMENT '免费小时数',
`paid_hours` int(11) DEFAULT NULL COMMENT '收费的小时数',
`price` int(11) DEFAULT NULL COMMENT '收费小时数花费的钱',
`top_end_time` timestamp NULL DEFAULT NULL COMMENT '置顶结束时间',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for core_version
-- ----------------------------
DROP TABLE IF EXISTS `core_version`;
-- App release records (version code/name, wgt bundle, force-update flag).
CREATE TABLE `core_version` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`version_code` int(11) DEFAULT NULL COMMENT '版本号',
`version_name` varchar(20) DEFAULT NULL COMMENT '版本名',
`wgt_filename` varchar(255) DEFAULT NULL COMMENT 'wgt文件名',
`force_update` tinyint(1) DEFAULT NULL COMMENT '强制更新',
`deployed` tinyint(1) DEFAULT NULL COMMENT 'wgt和html是否部署完成',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=87 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for financial_trade
-- ----------------------------
DROP TABLE IF EXISTS `financial_trade`;
-- Payment orders; amount in fen; trade_no is the system-generated order id.
CREATE TABLE `financial_trade` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`user_id` int(11) DEFAULT NULL,
`trade_no` varchar(64) DEFAULT NULL COMMENT '单号,userId_reason_amount_now_random',
`amount` int(11) DEFAULT NULL COMMENT '金额,单位分',
`title` varchar(50) DEFAULT NULL COMMENT '标题',
`description` varchar(50) DEFAULT NULL COMMENT '内容',
`third_trade_no` varchar(64) DEFAULT NULL COMMENT '第三方订单号',
`finished` tinyint(1) DEFAULT NULL COMMENT '交易状态,0是未完成,1是已完成',
`finish_time` timestamp NULL DEFAULT NULL COMMENT '支付完成时间',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE,
-- NOTE(review): index name is misspelled ('finacial'); left unchanged in case
-- external migrations reference it by name.
UNIQUE KEY `finacial_trade_trade_no` (`trade_no`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=205 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for financial_transfer
-- ----------------------------
DROP TABLE IF EXISTS `financial_transfer`;
-- Executed withdrawal transfers; links a withdrawal request to the
-- third-party payment transfer numbers.
CREATE TABLE `financial_transfer` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`user_id` int(11) NOT NULL COMMENT '申请提现用户的id',
`withdrawal_id` int(11) NOT NULL COMMENT 'FinancialWithdrawal提现申请表的id',
`third_transfer_no` varchar(64) NOT NULL COMMENT '第三方支付的转账单号',
`transfer_no` varchar(64) NOT NULL COMMENT '系统生成的转账单号',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE,
KEY `finacial_transfer_user_id` (`user_id`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for financial_withdrawal
-- ----------------------------
DROP TABLE IF EXISTS `financial_withdrawal`;
-- Withdrawal requests awaiting approval (approver and approval time recorded).
CREATE TABLE `financial_withdrawal` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`user_id` int(11) unsigned NOT NULL,
`money` int(11) DEFAULT NULL COMMENT '提现人民币数值',
`reason_property_id` int(11) DEFAULT NULL COMMENT '货币变化的原因',
`approve` tinyint(1) DEFAULT NULL COMMENT '提现申请是否批准',
`approve_time` timestamp NULL DEFAULT NULL COMMENT '提现批准时间',
`approve_user_id` int(11) DEFAULT NULL COMMENT '批准人',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for pay_payment_mode
-- ----------------------------
DROP TABLE IF EXISTS `pay_payment_mode`;
-- Payout destinations bound by a user (e.g. Alipay/WeChat account + name).
CREATE TABLE `pay_payment_mode` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`user_id` int(11) DEFAULT NULL,
`type_property_id` int(11) DEFAULT NULL COMMENT '绑定的提现方式ID,比如支付宝、微信',
`name` varchar(10) DEFAULT NULL COMMENT '真实姓名',
`account` varchar(30) DEFAULT NULL COMMENT '账号',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE,
KEY `user_id` (`user_id`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for social_follow
-- ----------------------------
DROP TABLE IF EXISTS `social_follow`;
-- Follower relationships: from_user_id follows to_user_id.
CREATE TABLE `social_follow` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`from_user_id` int(11) DEFAULT NULL COMMENT '关注者',
`to_user_id` int(11) DEFAULT NULL COMMENT '被关注者',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE,
KEY `from_user_id` (`from_user_id`) USING BTREE,
KEY `to_user_id` (`to_user_id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for social_notice
-- ----------------------------
DROP TABLE IF EXISTS `social_notice`;
-- Per-user notifications; msg1..msg5 are free-form slots per notice type.
CREATE TABLE `social_notice` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`user_id` int(11) DEFAULT NULL,
`type_property_id` int(11) DEFAULT NULL COMMENT '通知类型',
`msg1` varchar(255) DEFAULT NULL,
`msg2` varchar(255) DEFAULT NULL,
`msg3` varchar(255) DEFAULT NULL,
`msg4` varchar(255) DEFAULT NULL,
`msg5` varchar(255) DEFAULT NULL,
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE,
KEY `user_id` (`user_id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=919 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for social_review_chat
-- ----------------------------
DROP TABLE IF EXISTS `social_review_chat`;
-- Review-stage chat between publisher and worker; a NULL from_user_id means
-- the message is from the platform; previous_chat_id threads the conversation.
CREATE TABLE `social_review_chat` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`mission_id` int(11) DEFAULT NULL COMMENT '任务ID',
`mission_accept_id` int(11) DEFAULT NULL COMMENT '接任务的ID',
`from_user_id` int(11) DEFAULT NULL COMMENT '信息发出者,如果是null,说明是官方',
`to_user_id` int(11) DEFAULT NULL COMMENT '信息接受者',
`include_img` tinyint(1) DEFAULT NULL COMMENT '是否包含图片',
`text` varchar(255) DEFAULT NULL COMMENT '文字',
`status` varchar(30) DEFAULT NULL COMMENT '状态文字',
`previous_chat_id` int(11) DEFAULT NULL COMMENT '上一条对话ID,如果是第一条为null',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=153 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for sys_advice
-- ----------------------------
DROP TABLE IF EXISTS `sys_advice`;
-- User feedback submissions (message, contact info, optional images).
CREATE TABLE `sys_advice` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`user_id` int(11) DEFAULT NULL,
`message` text COMMENT '意见反馈内容',
`contact_info` varchar(50) DEFAULT NULL COMMENT '联系方式',
`have_img` tinyint(1) DEFAULT NULL COMMENT '是否包含图片',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for sys_menu
-- ----------------------------
DROP TABLE IF EXISTS `sys_menu`;
-- Admin menu tree (parent_id hierarchy) with frontend component/permission.
CREATE TABLE `sys_menu` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT COMMENT '用户自增id',
`parent_id` int(10) unsigned NOT NULL,
`name` varchar(100) NOT NULL COMMENT '菜单标题',
`sort` decimal(10,0) NOT NULL COMMENT '排序',
`href` varchar(255) DEFAULT NULL COMMENT '链接',
`icon` varchar(100) DEFAULT NULL COMMENT '图标',
`is_show` tinyint(1) NOT NULL DEFAULT '1' COMMENT '是否在菜单中显示',
`component` varchar(255) DEFAULT NULL COMMENT '组件的路径',
`component_name` varchar(255) DEFAULT NULL COMMENT '前端组件的名字',
`permission` varchar(200) DEFAULT NULL COMMENT '权限标识',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE,
UNIQUE KEY `uni_sys_menu_permission` (`permission`) USING BTREE,
KEY `idx_sys_menu_parent_id` (`parent_id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=62 DEFAULT CHARSET=utf8 COMMENT='菜单表';
-- ----------------------------
-- Table structure for sys_property
-- ----------------------------
DROP TABLE IF EXISTS `sys_property`;
-- Generic enum/property lookup table; `type` groups related property rows.
CREATE TABLE `sys_property` (
`id` int(10) unsigned NOT NULL AUTO_INCREMENT COMMENT 'id对应所有类型枚举的propertyId值',
`type` tinyint(4) NOT NULL COMMENT '属性类型名称,如果出现新的type值,需要再代码里面PropertyType里面添加',
`name` varchar(30) NOT NULL COMMENT '属性名字',
`value` mediumtext COMMENT '属性的具体的值,可为空',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE,
KEY `type` (`type`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=79 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for sys_role
-- ----------------------------
DROP TABLE IF EXISTS `sys_role`;
-- Roles (display name plus English role_name used by the backend).
CREATE TABLE `sys_role` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT COMMENT '用户自增id',
`name` varchar(100) NOT NULL COMMENT '角色名称(一般是中文)',
`role_name` varchar(255) DEFAULT NULL COMMENT '角色英文名称',
`useable` tinyint(1) DEFAULT NULL COMMENT '是否可用',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE,
KEY `idx_sys_role_role_name` (`role_name`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=utf8 COMMENT='角色表';
-- ----------------------------
-- Table structure for sys_role_menu
-- ----------------------------
DROP TABLE IF EXISTS `sys_role_menu`;
-- Role-to-menu assignments with per-menu add/edit/delete permission strings.
CREATE TABLE `sys_role_menu` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT COMMENT '用户自增id',
`role_id` int(11) NOT NULL COMMENT '角色ID',
`menu_id` int(11) NOT NULL COMMENT '菜单ID',
`add_permission` varchar(30) DEFAULT NULL,
`edit_permission` varchar(30) DEFAULT NULL,
`delete_permission` varchar(30) DEFAULT NULL,
`deleted` tinyint(11) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE,
KEY `idx_sys_role_menu_role_id` (`role_id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=203 DEFAULT CHARSET=utf8 COMMENT='角色-菜单';
-- ----------------------------
-- Table structure for sys_user
-- ----------------------------
DROP TABLE IF EXISTS `sys_user`;
-- Main user table: profile, referral hierarchy (superior_id), balances in fen.
CREATE TABLE `sys_user` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT COMMENT '用户自增id',
`pay_password` varchar(30) DEFAULT NULL COMMENT 'MD5后的支付密码',
`superior_id` int(11) unsigned DEFAULT NULL COMMENT '用户的上级id',
`agency_id` int(11) unsigned DEFAULT NULL COMMENT '我作为代理商的分成级别',
`partner_id` int(11) DEFAULT NULL COMMENT '代理商的id',
`nickname` varchar(20) DEFAULT NULL COMMENT '昵称',
`head_img_url` varchar(255) DEFAULT NULL COMMENT '三方登陆的头像地址',
`phone` varchar(20) DEFAULT NULL COMMENT '用户绑定的手机号',
`email` varchar(50) DEFAULT NULL COMMENT '邮箱',
`login_ip` varchar(30) DEFAULT NULL COMMENT '最后一次登陆IP',
`login_time` timestamp NULL DEFAULT NULL COMMENT '最后一次登陆日期',
`login_status_property_id` tinyint(4) DEFAULT NULL COMMENT '登陆状态,比如允许登陆、禁止登陆、需绑定手机才可以登陆等等',
`user_type` tinyint(4) DEFAULT NULL COMMENT '用户类型',
`deposit` int(11) DEFAULT NULL COMMENT '保证金,单位为分',
`mission_coin` int(11) DEFAULT NULL COMMENT '任务币,单位分',
`earning` int(11) DEFAULT NULL COMMENT '我当前收入,包括任务收入和分红,单位分',
`alipay` varchar(30) DEFAULT NULL COMMENT '支付宝账号',
`real_name` varchar(10) DEFAULT NULL COMMENT '真实姓名',
`push_id` varchar(100) DEFAULT NULL COMMENT 'unipush的id',
`device_info` varchar(255) DEFAULT NULL COMMENT '设备信息',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=100159 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Table structure for sys_user_role
-- ----------------------------
DROP TABLE IF EXISTS `sys_user_role`;
-- User-to-role assignments (junction table).
CREATE TABLE `sys_user_role` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT COMMENT '用户自增id',
`user_id` int(11) NOT NULL COMMENT '用户ID',
`role_id` int(11) NOT NULL COMMENT '角色ID',
`deleted` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否删除',
`creator` varchar(20) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updater` varchar(20) DEFAULT NULL,
`update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`) USING BTREE,
KEY `idx_user_id` (`user_id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=utf8 COMMENT='用户-角色';
SET FOREIGN_KEY_CHECKS = 1;
--
-- AGGREGATES
--
-- Setup for the duckdb_fdw aggregate regression tests: create the wrapper,
-- a server pointing at the test database file, and foreign tables mirroring
-- the PostgreSQL core regression fixtures (onek, tenk1, aggtest, ...).
--Testcase 266:
CREATE EXTENSION duckdb_fdw;
--Testcase 267:
CREATE SERVER sqlite_svr FOREIGN DATA WRAPPER duckdb_fdw
OPTIONS (database '/tmp/sqlitefdw_test_core.db');
--Testcase 268:
CREATE FOREIGN TABLE onek(
unique1 int4 OPTIONS (key 'true'),
unique2 int4,
two int4,
four int4,
ten int4,
twenty int4,
hundred int4,
thousand int4,
twothousand int4,
fivethous int4,
tenthous int4,
odd int4,
even int4,
stringu1 name,
stringu2 name,
string4 name
) SERVER sqlite_svr;
--Testcase 269:
CREATE FOREIGN TABLE aggtest (
a int2,
b float4
) SERVER sqlite_svr;
--Testcase 270:
CREATE FOREIGN TABLE student (
name text,
age int4,
location point,
gpa float8
) SERVER sqlite_svr;
--Testcase 271:
CREATE FOREIGN TABLE tenk1 (
unique1 int4,
unique2 int4,
two int4,
four int4,
ten int4,
twenty int4,
hundred int4,
thousand int4,
twothousand int4,
fivethous int4,
tenthous int4,
odd int4,
even int4,
stringu1 name,
stringu2 name,
string4 name
) SERVER sqlite_svr;
--Testcase 272:
CREATE FOREIGN TABLE INT8_TBL(
q1 int8 OPTIONS (key 'true'),
q2 int8 OPTIONS (key 'true')
) SERVER sqlite_svr;
--Testcase 273:
CREATE FOREIGN TABLE INT4_TBL(f1 int4 OPTIONS (key 'true')) SERVER sqlite_svr;
--Testcase 274:
CREATE FOREIGN TABLE multi_arg_agg (a int OPTIONS (key 'true'), b int, c text) SERVER sqlite_svr;
--Testcase 275:
CREATE FOREIGN TABLE VARCHAR_TBL(f1 varchar(4) OPTIONS (key 'true')) SERVER sqlite_svr;
--Testcase 276:
CREATE FOREIGN TABLE FLOAT8_TBL(f1 float8 OPTIONS (key 'true')) SERVER sqlite_svr;
-- avoid bit-exact output here because operations may not be bit-exact.
SET extra_float_digits = 0;
-- Basic avg/sum/max over the standard test tables. Expected results are
-- encoded in the output column aliases (e.g. avg_1, sum_1500).
--Testcase 1:
SELECT avg(four) AS avg_1 FROM onek;
--Testcase 2:
SELECT avg(a) AS avg_32 FROM aggtest WHERE a < 100;
-- In 7.1, avg(float4) is computed using float8 arithmetic.
--Testcase 3:
-- Round the result to limited digits to avoid platform-specific results.
SELECT avg(b)::numeric(10,3) AS avg_107_943 FROM aggtest;
--Testcase 4:
-- Round the result to limited digits to avoid platform-specific results.
SELECT avg(gpa)::numeric(10,3) AS avg_3_4 FROM ONLY student;
--Testcase 5:
SELECT sum(four) AS sum_1500 FROM onek;
--Testcase 6:
SELECT sum(a) AS sum_198 FROM aggtest;
--Testcase 7:
-- Round the result to limited digits to avoid platform-specific results.
SELECT sum(b)::numeric(10,3) AS avg_431_773 FROM aggtest;
--Testcase 8:
-- Round the result to limited digits to avoid platform-specific results.
SELECT sum(gpa)::numeric(10,3) AS avg_6_8 FROM ONLY student;
--Testcase 9:
SELECT max(four) AS max_3 FROM onek;
--Testcase 10:
SELECT max(a) AS max_100 FROM aggtest;
--Testcase 11:
SELECT max(aggtest.b) AS max_324_78 FROM aggtest;
--Testcase 12:
SELECT max(student.gpa) AS max_3_7 FROM student;
--Testcase 13:
-- Round the result to limited digits to avoid platform-specific results.
SELECT stddev_pop(b)::numeric(20,10) FROM aggtest;
--Testcase 14:
-- Round the result to limited digits to avoid platform-specific results.
SELECT stddev_samp(b)::numeric(20,10) FROM aggtest;
--Testcase 15:
-- Round the result to limited digits to avoid platform-specific results.
SELECT var_pop(b)::numeric(20,10) FROM aggtest;
--Testcase 16:
-- Round the result to limited digits to avoid platform-specific results.
SELECT var_samp(b)::numeric(20,10) FROM aggtest;
--Testcase 17:
SELECT stddev_pop(b::numeric) FROM aggtest;
--Testcase 18:
SELECT stddev_samp(b::numeric) FROM aggtest;
--Testcase 19:
SELECT var_pop(b::numeric) FROM aggtest;
--Testcase 20:
SELECT var_samp(b::numeric) FROM aggtest;
-- population variance is defined for a single tuple, sample variance
-- is not
-- agg_t3/agg_t4/agg_t5 test the float8/float4/numeric variance code paths
-- respectively, each over a single row and over inf/NaN special values.
-- Each INSERT intentionally omits the key column; the foreign table's id
-- column is left NULL by design of this test.
--Testcase 277:
CREATE FOREIGN TABLE agg_t3(a float8, b float8, id integer OPTIONS (key 'true')) SERVER sqlite_svr;
--Testcase 278:
DELETE FROM agg_t3;
--Testcase 279:
INSERT INTO agg_t3 values (1.0::float8, 2.0::float8);
--Testcase 280:
SELECT var_pop(a), var_samp(b) FROM agg_t3;
--Testcase 281:
DELETE FROM agg_t3;
--Testcase 282:
INSERT INTO agg_t3 values (3.0::float8, 4.0::float8);
--Testcase 283:
SELECT stddev_pop(a), stddev_samp(b) FROM agg_t3;
--Testcase 284:
DELETE FROM agg_t3;
--Testcase 285:
INSERT INTO agg_t3 values ('inf'::float8, 'inf'::float8);
--Testcase 286:
SELECT var_pop(a), var_samp(b) FROM agg_t3;
--Testcase 287:
SELECT stddev_pop(a), stddev_samp(b) FROM agg_t3;
--Testcase 288:
DELETE FROM agg_t3;
--Testcase 289:
INSERT INTO agg_t3 values ('nan'::float8, 'nan'::float8);
--Testcase 290:
SELECT var_pop(a), var_samp(b) FROM agg_t3;
--Testcase 291:
SELECT stddev_pop(a), stddev_samp(b) FROM agg_t3;
--Testcase 292:
CREATE FOREIGN TABLE agg_t4(a float4, b float4, id integer OPTIONS (key 'true')) SERVER sqlite_svr;
--Testcase 293:
DELETE FROM agg_t4;
--Testcase 294:
INSERT INTO agg_t4 values (1.0::float4, 2.0::float4);
--Testcase 295:
SELECT var_pop(a), var_samp(b) FROM agg_t4;
--Testcase 296:
DELETE FROM agg_t4;
--Testcase 297:
INSERT INTO agg_t4 values (3.0::float4, 4.0::float4);
--Testcase 298:
SELECT stddev_pop(a), stddev_samp(b) FROM agg_t4;
--Testcase 299:
DELETE FROM agg_t4;
--Testcase 300:
INSERT INTO agg_t4 values ('inf'::float4, 'inf'::float4);
--Testcase 301:
SELECT var_pop(a), var_samp(b) FROM agg_t4;
--Testcase 302:
SELECT stddev_pop(a), stddev_samp(b) FROM agg_t4;
--Testcase 303:
DELETE FROM agg_t4;
--Testcase 304:
INSERT INTO agg_t4 values ('nan'::float4, 'nan'::float4);
--Testcase 305:
SELECT var_pop(a), var_samp(b) FROM agg_t4;
--Testcase 306:
SELECT stddev_pop(a), stddev_samp(b) FROM agg_t4;
--Testcase 307:
CREATE FOREIGN TABLE agg_t5(a numeric, b numeric, id integer OPTIONS (key 'true')) SERVER sqlite_svr;
--Testcase 308:
DELETE FROM agg_t5;
--Testcase 309:
INSERT INTO agg_t5 values (1.0::numeric, 2.0::numeric);
--Testcase 310:
SELECT var_pop(a), var_samp(b) FROM agg_t5;
--Testcase 311:
DELETE FROM agg_t5;
--Testcase 312:
INSERT INTO agg_t5 values (3.0::numeric, 4.0::numeric);
--Testcase 313:
SELECT stddev_pop(a), stddev_samp(b) FROM agg_t5;
--Testcase 314:
DELETE FROM agg_t5;
--Testcase 315:
INSERT INTO agg_t5 values ('nan'::numeric, 'nan'::numeric);
--Testcase 316:
SELECT var_pop(a), var_samp(b) FROM agg_t5;
--Testcase 317:
SELECT stddev_pop(a), stddev_samp(b) FROM agg_t5;
-- verify correct results for null and NaN inputs
-- agg_t8 only supplies a 3-row driver table; the aggregated expressions
-- are constant NULL/NaN, exercising the strict-transition NULL handling.
--Testcase 318:
CREATE FOREIGN TABLE agg_t8(a text OPTIONS (key 'true'), b text) SERVER sqlite_svr;
--Testcase 319:
DELETE FROM agg_t8;
--Testcase 320:
INSERT INTO agg_t8 select * from generate_series(1,3);
--Testcase 321:
select sum(null::int4) from agg_t8;
--Testcase 322:
select sum(null::int8) from agg_t8;
--Testcase 323:
select sum(null::numeric) from agg_t8;
--Testcase 324:
select sum(null::float8) from agg_t8;
--Testcase 325:
select avg(null::int4) from agg_t8;
--Testcase 326:
select avg(null::int8) from agg_t8;
--Testcase 327:
select avg(null::numeric) from agg_t8;
--Testcase 328:
select avg(null::float8) from agg_t8;
--Testcase 329:
select sum('NaN'::numeric) from agg_t8;
--Testcase 330:
select avg('NaN'::numeric) from agg_t8;
-- verify correct results for infinite inputs
-- both orderings of (1, inf) are tested because the transition function's
-- running state can differ depending on which value is seen first.
--Testcase 331:
DELETE FROM agg_t3;
--Testcase 332:
INSERT INTO agg_t3 VALUES ('1'::float8), ('infinity'::float8);
--Testcase 333:
SELECT avg(a), var_pop(a) FROM agg_t3;
--Testcase 334:
DELETE FROM agg_t3;
--Testcase 335:
INSERT INTO agg_t3 VALUES ('infinity'::float8), ('1'::float8);
--Testcase 336:
SELECT avg(a), var_pop(a) FROM agg_t3;
--Testcase 337:
DELETE FROM agg_t3;
--Testcase 338:
INSERT INTO agg_t3 VALUES ('infinity'::float8), ('infinity'::float8);
--Testcase 339:
SELECT avg(a), var_pop(a) FROM agg_t3;
--Testcase 340:
DELETE FROM agg_t3;
--Testcase 341:
INSERT INTO agg_t3 VALUES ('-infinity'::float8), ('infinity'::float8);
--Testcase 342:
SELECT avg(a), var_pop(a) FROM agg_t3;
-- test accuracy with a large input offset
-- values clustered far from zero expose catastrophic cancellation in
-- naive sum-of-squares variance implementations.
--Testcase 343:
CREATE FOREIGN TABLE agg_t6(a float8, id integer OPTIONS (key 'true')) SERVER sqlite_svr;
--Testcase 344:
DELETE FROM agg_t6;
--Testcase 345:
INSERT INTO agg_t6 VALUES (100000003), (100000004), (100000006), (100000007);
--Testcase 346:
SELECT avg(a), var_pop(a) FROM agg_t6;
--Testcase 347:
DELETE FROM agg_t6;
--Testcase 348:
INSERT INTO agg_t6 VALUES (7000000000005), (7000000000007);
--Testcase 349:
SELECT avg(a), var_pop(a) FROM agg_t6;
-- SQL2003 binary aggregates
-- Two-argument regression aggregates: regr_count/sxx/syy/sxy/avgx/avgy/
-- r2/slope/intercept plus covariance and correlation.
--Testcase 21:
SELECT regr_count(b, a) FROM aggtest;
--Testcase 22:
SELECT regr_sxx(b, a) FROM aggtest;
--Testcase 23:
-- Round the result to limited digits to avoid platform-specific results.
SELECT regr_syy(b, a)::numeric(20,10) FROM aggtest;
--Testcase 24:
-- Round the result to limited digits to avoid platform-specific results.
SELECT regr_sxy(b, a)::numeric(20,10) FROM aggtest;
--Testcase 25:
-- Round the result to limited digits to avoid platform-specific results.
SELECT regr_avgx(b, a), regr_avgy(b, a)::numeric(20,10) FROM aggtest;
--Testcase 26:
-- Round the result to limited digits to avoid platform-specific results.
SELECT regr_r2(b, a)::numeric(20,10) FROM aggtest;
--Testcase 27:
-- Round the result to limited digits to avoid platform-specific results.
SELECT regr_slope(b, a)::numeric(20,10), regr_intercept(b, a)::numeric(20,10) FROM aggtest;
--Testcase 28:
-- Round the result to limited digits to avoid platform-specific results.
SELECT covar_pop(b, a)::numeric(20,10), covar_samp(b, a)::numeric(20,10) FROM aggtest;
--Testcase 29:
-- Round the result to limited digits to avoid platform-specific results.
SELECT corr(b, a)::numeric(20,10) FROM aggtest;
-- check single-tuple behavior
-- covar_pop is defined over one tuple; covar_samp is not (NULL), and
-- inf/NaN arguments must propagate sanely.
--Testcase 350:
CREATE FOREIGN TABLE agg_t7(a float8, b float8, c float8, d float8, id integer OPTIONS (key 'true')) SERVER sqlite_svr;
--Testcase 351:
DELETE FROM agg_t7;
--Testcase 352:
INSERT INTO agg_t7 VALUES (1, 2, 3, 4);
--Testcase 353:
SELECT covar_pop(a,b), covar_samp(c,d) FROM agg_t7;
--Testcase 354:
DELETE FROM agg_t7;
--Testcase 355:
INSERT INTO agg_t7 VALUES (1, 'inf', 3, 'inf');
--Testcase 356:
SELECT covar_pop(a,b), covar_samp(c,d) FROM agg_t7;
--Testcase 357:
DELETE FROM agg_t7;
--Testcase 358:
INSERT INTO agg_t7 VALUES (1, 'nan', 3, 'nan');
--Testcase 359:
SELECT covar_pop(a,b), covar_samp(c,d) FROM agg_t7;
-- test accum and combine functions directly
-- These call the internal transition/combine support functions
-- (float8_accum, float8_regr_accum, float8_combine, float8_regr_combine)
-- with hand-built state arrays stored as text and cast to float8[].
--Testcase 360:
CREATE FOREIGN TABLE regr_test (x float8, y float8, id int options (key 'true')) SERVER sqlite_svr;
--Testcase 361:
DELETE FROM regr_test;
--Testcase 362:
INSERT INTO regr_test VALUES (10,150),(20,250),(30,350),(80,540),(100,200);
--Testcase 363:
SELECT count(*), sum(x), regr_sxx(y,x), sum(y),regr_syy(y,x), regr_sxy(y,x)
FROM regr_test WHERE x IN (10,20,30,80);
--Testcase 364:
SELECT count(*), sum(x), regr_sxx(y,x), sum(y),regr_syy(y,x), regr_sxy(y,x)
FROM regr_test;
--Testcase 365:
CREATE FOREIGN TABLE agg_t15 (a text, b int, c int, id int options (key 'true')) SERVER sqlite_svr;
--Testcase 366:
delete from agg_t15;
--Testcase 367:
insert into agg_t15 values ('{4,140,2900}', 100);
--Testcase 368:
SELECT float8_accum(a::float8[], b) from agg_t15;
--Testcase 369:
delete from agg_t15;
--Testcase 370:
insert into agg_t15 values ('{4,140,2900,1290,83075,15050}', 200, 100);
--Testcase 371:
SELECT float8_regr_accum(a::float8[], b, c) from agg_t15;
--Testcase 372:
SELECT count(*), sum(x), regr_sxx(y,x), sum(y),regr_syy(y,x), regr_sxy(y,x)
FROM regr_test WHERE x IN (10,20,30);
--Testcase 373:
SELECT count(*), sum(x), regr_sxx(y,x), sum(y),regr_syy(y,x), regr_sxy(y,x)
FROM regr_test WHERE x IN (80,100);
--Testcase 374:
CREATE FOREIGN TABLE agg_t16 (a text, b text, id int options (key 'true')) SERVER sqlite_svr;
--Testcase 375:
delete from agg_t16;
--Testcase 376:
insert into agg_t16 values ('{3,60,200}', '{0,0,0}');
--Testcase 377:
insert into agg_t16 values ('{0,0,0}', '{2,180,200}');
--Testcase 378:
insert into agg_t16 values ('{3,60,200}', '{2,180,200}');
--Testcase 379:
SELECT float8_combine(a::float8[], b::float8[]) FROM agg_t16;
--Testcase 380:
delete from agg_t16;
--Testcase 381:
insert into agg_t16 values ('{3,60,200,750,20000,2000}', '{0,0,0,0,0,0}');
--Testcase 382:
insert into agg_t16 values ('{0,0,0,0,0,0}', '{2,180,200,740,57800,-3400}');
--Testcase 383:
insert into agg_t16 values ('{3,60,200,750,20000,2000}', '{2,180,200,740,57800,-3400}');
--Testcase 384:
SELECT float8_regr_combine(a::float8[], b::float8[]) FROM agg_t16;
--Testcase 385:
DROP FOREIGN TABLE regr_test;
-- test count, distinct
--Testcase 30:
SELECT count(four) AS cnt_1000 FROM onek;
--Testcase 31:
SELECT count(DISTINCT four) AS cnt_4 FROM onek;
--Testcase 32:
select ten, count(*), sum(four) from onek
group by ten order by ten;
--Testcase 33:
select ten, count(four), sum(DISTINCT four) from onek
group by ten order by ten;
-- user-defined aggregates
-- newavg/newsum deliberately use the legacy sfunc1/stype1/initcond1
-- CREATE AGGREGATE spellings to verify backward compatibility; newcnt is
-- declared both as a (*) aggregate and an ("any") aggregate.
--Testcase 386:
CREATE AGGREGATE newavg (
sfunc = int4_avg_accum, basetype = int4, stype = _int8,
finalfunc = int8_avg,
initcond1 = '{0,0}'
);
--Testcase 387:
CREATE AGGREGATE newsum (
sfunc1 = int4pl, basetype = int4, stype1 = int4,
initcond1 = '0'
);
--Testcase 388:
CREATE AGGREGATE newcnt (*) (
sfunc = int8inc, stype = int8,
initcond = '0', parallel = safe
);
--Testcase 389:
CREATE AGGREGATE newcnt ("any") (
sfunc = int8inc_any, stype = int8,
initcond = '0'
);
--Testcase 390:
CREATE AGGREGATE oldcnt (
sfunc = int8inc, basetype = 'ANY', stype = int8,
initcond = '0'
);
--Testcase 391:
create function sum3(int8,int8,int8) returns int8 as
'select $1 + $2 + $3' language sql strict immutable;
--Testcase 392:
create aggregate sum2(int8,int8) (
sfunc = sum3, stype = int8,
initcond = '0'
);
--Testcase 34:
SELECT newavg(four) AS avg_1 FROM onek;
--Testcase 35:
SELECT newsum(four) AS sum_1500 FROM onek;
--Testcase 36:
SELECT newcnt(four) AS cnt_1000 FROM onek;
--Testcase 37:
SELECT newcnt(*) AS cnt_1000 FROM onek;
--Testcase 38:
SELECT oldcnt(*) AS cnt_1000 FROM onek;
--Testcase 39:
SELECT sum2(q1,q2) FROM int8_tbl;
-- test for outer-level aggregates
-- this should work
--Testcase 40:
select ten, sum(distinct four) from onek a
group by ten
having exists (select 1 from onek b where sum(distinct a.four) = b.four);
-- this should fail because subquery has an agg of its own in WHERE
--Testcase 41:
select ten, sum(distinct four) from onek a
group by ten
having exists (select 1 from onek b
where sum(distinct a.four + b.four) = b.four);
-- Test handling of sublinks within outer-level aggregates.
-- Per bug report from Daniel Grace.
--Testcase 42:
select
(select max((select i.unique2 from tenk1 i where i.unique1 = o.unique1)))
from tenk1 o;
-- Test handling of Params within aggregate arguments in hashed aggregation.
-- Per bug report from Jeevan Chalke.
--Testcase 393:
explain (verbose, costs off)
select s1, s2, sm
from generate_series(1, 3) s1,
lateral (select s2, sum(s1 + s2) sm
from generate_series(1, 3) s2 group by s2) ss
order by 1, 2;
--Testcase 394:
select s1, s2, sm
from generate_series(1, 3) s1,
lateral (select s2, sum(s1 + s2) sm
from generate_series(1, 3) s2 group by s2) ss
order by 1, 2;
--Testcase 395:
explain (verbose, costs off)
select array(select sum(x+y) s
from generate_series(1,3) y group by y order by s)
from generate_series(1,3) x;
--Testcase 396:
select array(select sum(x+y) s
from generate_series(1,3) y group by y order by s)
from generate_series(1,3) x;
--
-- test for bitwise integer aggregates
--
--Testcase 397:
CREATE FOREIGN TABLE bitwise_test(
i2 INT2,
i4 INT4,
i8 INT8,
i INTEGER,
x INT2
) SERVER sqlite_svr;
-- empty case
-- aggregates over zero rows return NULL, hence the "?" aliases.
--Testcase 43:
SELECT
BIT_AND(i2) AS "?",
BIT_OR(i4) AS "?"
FROM bitwise_test;
--Testcase 44:
INSERT INTO bitwise_test VALUES
(1, 1, 1, 1, 1),
(3, 3, 3, null, 2),
(7, 7, 7, 3, 4);
-- column aliases below state the expected value for each aggregate.
--Testcase 45:
SELECT
BIT_AND(i2) AS "1",
BIT_AND(i4) AS "1",
BIT_AND(i8) AS "1",
BIT_AND(i) AS "?",
BIT_AND(x) AS "0",
BIT_OR(i2) AS "7",
BIT_OR(i4) AS "7",
BIT_OR(i8) AS "7",
BIT_OR(i) AS "?",
BIT_OR(x) AS "7"
FROM bitwise_test;
--
-- test boolean aggregates
--
-- first test all possible transition and final states
-- Each BEGIN/ROLLBACK pair loads a specific truth-table slice into
-- bool_test_tmp, probes the transition function directly, then discards
-- the rows so the next case starts clean.
--Testcase 398:
CREATE FOREIGN TABLE bool_test_tmp(
b1 BOOL OPTIONS (key 'true'),
b2 BOOL OPTIONS (key 'true')
) SERVER sqlite_svr;
-- boolean and transitions
-- null because strict
BEGIN;
--Testcase 399:
INSERT INTO bool_test_tmp VALUES
(NULL, NULL),
(TRUE, NULL),
(FALSE, NULL),
(NULL, TRUE),
(NULL, FALSE);
--Testcase 400:
SELECT booland_statefunc(b1, b2) IS NULL as "t" FROM bool_test_tmp;
ROLLBACK;
-- and actual computations
BEGIN;
--Testcase 401:
INSERT INTO bool_test_tmp VALUES
(TRUE, TRUE);
--Testcase 402:
SELECT booland_statefunc(b1, b2) as "t" FROM bool_test_tmp;
ROLLBACK;
BEGIN;
--Testcase 403:
INSERT INTO bool_test_tmp VALUES
(TRUE, FALSE),
(FALSE, TRUE),
(FALSE, FALSE);
--Testcase 404:
SELECT NOT booland_statefunc(b1, b2) as "t" FROM bool_test_tmp;
ROLLBACK;
-- boolean or transitions
-- null because strict
BEGIN;
--Testcase 405:
INSERT INTO bool_test_tmp VALUES
(NULL, NULL),
(TRUE, NULL),
(FALSE, NULL),
(NULL, TRUE),
(NULL, FALSE);
--Testcase 406:
SELECT boolor_statefunc(b1, b2) IS NULL as "t" FROM bool_test_tmp;
ROLLBACK;
-- actual computations
BEGIN;
--Testcase 407:
INSERT INTO bool_test_tmp VALUES
(TRUE, TRUE),
(TRUE, FALSE),
(FALSE, TRUE);
--Testcase 408:
SELECT boolor_statefunc(b1, b2) as "t" FROM bool_test_tmp;
ROLLBACK;
BEGIN;
--Testcase 409:
INSERT INTO bool_test_tmp VALUES
(FALSE, FALSE);
--Testcase 410:
SELECT NOT boolor_statefunc(b1, b2) as "t" FROM bool_test_tmp;
ROLLBACK;
--Testcase 411:
CREATE FOREIGN TABLE bool_test(
b1 BOOL,
b2 BOOL,
b3 BOOL,
b4 BOOL
) SERVER sqlite_svr;
-- empty case
--Testcase 46:
SELECT
BOOL_AND(b1) AS "n",
BOOL_OR(b3) AS "n"
FROM bool_test;
--Testcase 47:
INSERT INTO bool_test VALUES
(TRUE, null, FALSE, null),
(FALSE, TRUE, null, null),
(null, TRUE, FALSE, null);
-- aliases spell the expected t/f/n (true/false/null) result per column;
-- EVERY is the SQL-standard synonym for BOOL_AND.
--Testcase 48:
SELECT
BOOL_AND(b1) AS "f",
BOOL_AND(b2) AS "t",
BOOL_AND(b3) AS "f",
BOOL_AND(b4) AS "n",
BOOL_AND(NOT b2) AS "f",
BOOL_AND(NOT b3) AS "t"
FROM bool_test;
--Testcase 49:
SELECT
EVERY(b1) AS "f",
EVERY(b2) AS "t",
EVERY(b3) AS "f",
EVERY(b4) AS "n",
EVERY(NOT b2) AS "f",
EVERY(NOT b3) AS "t"
FROM bool_test;
--Testcase 50:
SELECT
BOOL_OR(b1) AS "t",
BOOL_OR(b2) AS "t",
BOOL_OR(b3) AS "f",
BOOL_OR(b4) AS "n",
BOOL_OR(NOT b2) AS "f",
BOOL_OR(NOT b3) AS "t"
FROM bool_test;
--
-- Test cases that should be optimized into indexscans instead of
-- the generic aggregate implementation.
--
-- Basic cases
-- Each pair of statements shows the plan (costs off) and then the result.
--Testcase 51:
explain (costs off)
select min(unique1) from tenk1;
--Testcase 52:
select min(unique1) from tenk1;
--Testcase 53:
explain (costs off)
select max(unique1) from tenk1;
--Testcase 54:
select max(unique1) from tenk1;
--Testcase 55:
explain (costs off)
select max(unique1) from tenk1 where unique1 < 42;
--Testcase 56:
select max(unique1) from tenk1 where unique1 < 42;
--Testcase 57:
explain (costs off)
select max(unique1) from tenk1 where unique1 > 42;
--Testcase 58:
select max(unique1) from tenk1 where unique1 > 42;
-- the planner may choose a generic aggregate here if parallel query is
-- enabled, since that plan will be parallel safe and the "optimized"
-- plan, which has almost identical cost, will not be. we want to test
-- the optimized plan, so temporarily disable parallel query.
begin;
set local max_parallel_workers_per_gather = 0;
--Testcase 59:
explain (costs off)
select max(unique1) from tenk1 where unique1 > 42000;
--Testcase 60:
select max(unique1) from tenk1 where unique1 > 42000;
rollback;
-- multi-column index (uses tenk1_thous_tenthous)
--Testcase 61:
explain (costs off)
select max(tenthous) from tenk1 where thousand = 33;
--Testcase 62:
select max(tenthous) from tenk1 where thousand = 33;
--Testcase 63:
explain (costs off)
select min(tenthous) from tenk1 where thousand = 33;
--Testcase 64:
select min(tenthous) from tenk1 where thousand = 33;
-- check parameter propagation into an indexscan subquery
--Testcase 65:
explain (costs off)
select f1, (select min(unique1) from tenk1 where unique1 > f1) AS gt
from int4_tbl;
--Testcase 66:
select f1, (select min(unique1) from tenk1 where unique1 > f1) AS gt
from int4_tbl;
-- check some cases that were handled incorrectly in 8.3.0
--Testcase 67:
explain (costs off)
select distinct max(unique2) from tenk1;
--Testcase 68:
select distinct max(unique2) from tenk1;
--Testcase 69:
explain (costs off)
select max(unique2) from tenk1 order by 1;
--Testcase 70:
select max(unique2) from tenk1 order by 1;
--Testcase 71:
explain (costs off)
select max(unique2) from tenk1 order by max(unique2);
--Testcase 72:
select max(unique2) from tenk1 order by max(unique2);
--Testcase 73:
explain (costs off)
select max(unique2) from tenk1 order by max(unique2)+1;
--Testcase 74:
select max(unique2) from tenk1 order by max(unique2)+1;
--Testcase 75:
explain (costs off)
select max(unique2), generate_series(1,3) as g from tenk1 order by g desc;
--Testcase 76:
select max(unique2), generate_series(1,3) as g from tenk1 order by g desc;
-- interesting corner case: constant gets optimized into a seqscan
--Testcase 77:
explain (costs off)
select max(100) from tenk1;
--Testcase 78:
select max(100) from tenk1;
-- try it on an inheritance tree
-- the foreign parent has three local children, each with a differently
-- shaped index (asc, desc, partial) to exercise min/max path selection
-- across an inheritance hierarchy.
--Testcase 412:
create foreign table minmaxtest(f1 int) server sqlite_svr;
--Testcase 413:
create table minmaxtest1() inherits (minmaxtest);
--Testcase 414:
create table minmaxtest2() inherits (minmaxtest);
--Testcase 415:
create table minmaxtest3() inherits (minmaxtest);
--Testcase 416:
create index minmaxtest1i on minmaxtest1(f1);
--Testcase 417:
create index minmaxtest2i on minmaxtest2(f1 desc);
--Testcase 418:
create index minmaxtest3i on minmaxtest3(f1) where f1 is not null;
--Testcase 79:
insert into minmaxtest values(11), (12);
--Testcase 80:
insert into minmaxtest1 values(13), (14);
--Testcase 81:
insert into minmaxtest2 values(15), (16);
--Testcase 82:
insert into minmaxtest3 values(17), (18);
--Testcase 83:
explain (costs off)
select min(f1), max(f1) from minmaxtest;
--Testcase 84:
select min(f1), max(f1) from minmaxtest;
-- DISTINCT doesn't do anything useful here, but it shouldn't fail
--Testcase 85:
explain (costs off)
select distinct min(f1), max(f1) from minmaxtest;
--Testcase 86:
select distinct min(f1), max(f1) from minmaxtest;
-- check for correct detection of nested-aggregate errors
--Testcase 87:
select max(min(unique1)) from tenk1;
--Testcase 88:
select (select max(min(unique1)) from int8_tbl) from tenk1;
--
-- Test removal of redundant GROUP BY columns
--
-- agg_t1/agg_t2 declare composite keys via OPTIONS (key 'true'); the
-- planner may drop functionally-dependent columns from GROUP BY when
-- the full key is present.
--Testcase 419:
create foreign table agg_t1 (a int OPTIONS (key 'true'), b int OPTIONS (key 'true'), c int, d int) server sqlite_svr;
--Testcase 420:
create foreign table agg_t2 (x int OPTIONS (key 'true'), y int OPTIONS (key 'true'), z int) server sqlite_svr;
--Testcase 421:
create foreign table agg_t9 (a int OPTIONS (key 'true'), b int OPTIONS (key 'true'), c int) server sqlite_svr;
-- Non-primary-key columns can be removed from GROUP BY
--Testcase 89:
explain (costs off) select * from agg_t1 group by a,b,c,d;
-- No removal can happen if the complete PK is not present in GROUP BY
--Testcase 90:
explain (costs off) select a,c from agg_t1 group by a,c,d;
-- Test removal across multiple relations
--Testcase 91:
explain (costs off) select *
from agg_t1 inner join agg_t2 on agg_t1.a = agg_t2.x and agg_t1.b = agg_t2.y
group by agg_t1.a,agg_t1.b,agg_t1.c,agg_t1.d,agg_t2.x,agg_t2.y,agg_t2.z;
-- Test case where agg_t1 can be optimized but not agg_t2
--Testcase 92:
explain (costs off) select agg_t1.*,agg_t2.x,agg_t2.z
from agg_t1 inner join agg_t2 on agg_t1.a = agg_t2.x and agg_t1.b = agg_t2.y
group by agg_t1.a,agg_t1.b,agg_t1.c,agg_t1.d,agg_t2.x,agg_t2.z;
-- Cannot optimize when PK is deferrable
--Testcase 422:
explain (costs off) select * from agg_t9 group by a,b,c;
--Testcase 423:
create temp table t1c () inherits (agg_t1);
-- Ensure we don't remove any columns when t1 has a child table
--Testcase 424:
explain (costs off) select * from agg_t1 group by a,b,c,d;
-- Okay to remove columns if we're only querying the parent.
--Testcase 425:
explain (costs off) select * from only agg_t1 group by a,b,c,d;
-- Skip this test, duckdb_fdw does not support partition table
--create foreign table p_t1 (
-- a int options (key 'true'),
-- b int options (key 'true'),
-- c int,
-- d int,
--) partition by list(a) server sqlite_svr;
--create temp table p_t1_1 partition of p_t1 for values in(1);
--create temp table p_t1_2 partition of p_t1 for values in(2);
-- Ensure we can remove non-PK columns for partitioned tables.
--explain (costs off) select * from p_t1 group by a,b,c,d;
--drop table t1 cascade;
--drop table t2;
--drop table t3;
--drop table p_t1;
--
-- Test GROUP BY matching of join columns that are type-coerced due to USING
--
-- f1 is int in t1 but bigint in t2, so the USING merge column is coerced;
-- only the last of the four variants should raise an error.
--Testcase 426:
create foreign table t1(f1 int, f2 bigint) server sqlite_svr;
--Testcase 427:
create foreign table t2(f1 bigint, f22 bigint) server sqlite_svr;
--Testcase 428:
select f1 from t1 left join t2 using (f1) group by f1;
--Testcase 429:
select f1 from t1 left join t2 using (f1) group by t1.f1;
--Testcase 430:
select t1.f1 from t1 left join t2 using (f1) group by t1.f1;
-- only this one should fail:
--Testcase 431:
select t1.f1 from t1 left join t2 using (f1) group by f1;
--Testcase 432:
drop foreign table t1, t2;
--
-- Test combinations of DISTINCT and/or ORDER BY
--
-- transaction-wrapped so the fixture rows are rolled back afterwards.
begin;
--Testcase 93:
delete from INT8_TBL;
--Testcase 94:
insert into INT8_TBL values (1,4),(2,3),(3,1),(4,2);
--Testcase 95:
select array_agg(q1 order by q2)
from INT8_TBL;
--Testcase 96:
select array_agg(q1 order by q1)
from INT8_TBL;
--Testcase 97:
select array_agg(q1 order by q1 desc)
from INT8_TBL;
--Testcase 98:
select array_agg(q2 order by q1 desc)
from INT8_TBL;
--Testcase 99:
delete from INT4_TBL;
--Testcase 100:
insert into INT4_TBL values (1),(2),(1),(3),(null),(2);
--Testcase 101:
select array_agg(distinct f1)
from INT4_TBL;
--Testcase 102:
select array_agg(distinct f1 order by f1)
from INT4_TBL;
--Testcase 103:
select array_agg(distinct f1 order by f1 desc)
from INT4_TBL;
--Testcase 104:
select array_agg(distinct f1 order by f1 desc nulls last)
from INT4_TBL;
rollback;
-- multi-arg aggs, strict/nonstrict, distinct/order by
-- aggf_trans (strict) and aggfns_trans (non-strict) accumulate rows into
-- an aggtype[] state so the exact input order/dedup behavior is visible
-- in the output.
--Testcase 433:
create type aggtype as (a integer, b integer, c text);
--Testcase 434:
create function aggf_trans(aggtype[],integer,integer,text) returns aggtype[]
as 'select array_append($1,ROW($2,$3,$4)::aggtype)'
language sql strict immutable;
--Testcase 435:
create function aggfns_trans(aggtype[],integer,integer,text) returns aggtype[]
as 'select array_append($1,ROW($2,$3,$4)::aggtype)'
language sql immutable;
--Testcase 436:
create aggregate aggfstr(integer,integer,text) (
sfunc = aggf_trans, stype = aggtype[],
initcond = '{}'
);
--Testcase 437:
create aggregate aggfns(integer,integer,text) (
sfunc = aggfns_trans, stype = aggtype[], sspace = 10000,
initcond = '{}'
);
begin;
--Testcase 105:
insert into multi_arg_agg values (1,3,'foo'),(0,null,null),(2,2,'bar'),(3,1,'baz');
--Testcase 106:
select aggfstr(a,b,c) from multi_arg_agg;
--Testcase 107:
select aggfns(a,b,c) from multi_arg_agg;
-- the generate_series cross join multiplies each row, so DISTINCT must
-- collapse the duplicates again.
--Testcase 108:
select aggfstr(distinct a,b,c) from multi_arg_agg, generate_series(1,3) i;
--Testcase 109:
select aggfns(distinct a,b,c) from multi_arg_agg, generate_series(1,3) i;
--Testcase 110:
select aggfstr(distinct a,b,c order by b) from multi_arg_agg, generate_series(1,3) i;
--Testcase 111:
select aggfns(distinct a,b,c order by b) from multi_arg_agg, generate_series(1,3) i;
-- test specific code paths
-- "order by c using ~<~" sorts with the text pattern operator class.
--Testcase 112:
select aggfns(distinct a,a,c order by c using ~<~,a) from multi_arg_agg, generate_series(1,2) i;
--Testcase 113:
select aggfns(distinct a,a,c order by c using ~<~) from multi_arg_agg, generate_series(1,2) i;
--Testcase 114:
select aggfns(distinct a,a,c order by a) from multi_arg_agg, generate_series(1,2) i;
--Testcase 115:
select aggfns(distinct a,b,c order by a,c using ~<~,b) from multi_arg_agg, generate_series(1,2) i;
-- check node I/O via view creation and usage, also deparsing logic
-- each variant is round-tripped through pg_get_viewdef to verify the
-- aggregate call deparses back to equivalent SQL.
--Testcase 438:
create view agg_view1 as
select aggfns(a,b,c) from multi_arg_agg;
--Testcase 116:
select * from agg_view1;
--Testcase 117:
select pg_get_viewdef('agg_view1'::regclass);
--Testcase 439:
create or replace view agg_view1 as
select aggfns(distinct a,b,c) from multi_arg_agg, generate_series(1,3) i;
--Testcase 118:
select * from agg_view1;
--Testcase 119:
select pg_get_viewdef('agg_view1'::regclass);
--Testcase 440:
create or replace view agg_view1 as
select aggfns(distinct a,b,c order by b) from multi_arg_agg, generate_series(1,3) i;
--Testcase 120:
select * from agg_view1;
--Testcase 121:
select pg_get_viewdef('agg_view1'::regclass);
--Testcase 441:
create or replace view agg_view1 as
select aggfns(a,b,c order by b+1) from multi_arg_agg;
--Testcase 122:
select * from agg_view1;
--Testcase 123:
select pg_get_viewdef('agg_view1'::regclass);
--Testcase 442:
create or replace view agg_view1 as
select aggfns(a,a,c order by b) from multi_arg_agg;
--Testcase 124:
select * from agg_view1;
--Testcase 125:
select pg_get_viewdef('agg_view1'::regclass);
--Testcase 443:
create or replace view agg_view1 as
select aggfns(a,b,c order by c using ~<~) from multi_arg_agg;
--Testcase 126:
select * from agg_view1;
--Testcase 127:
select pg_get_viewdef('agg_view1'::regclass);
--Testcase 444:
create or replace view agg_view1 as
select aggfns(distinct a,b,c order by a,c using ~<~,b) from multi_arg_agg, generate_series(1,2) i;
--Testcase 128:
select * from agg_view1;
--Testcase 129:
select pg_get_viewdef('agg_view1'::regclass);
--Testcase 445:
drop view agg_view1;
rollback;
-- incorrect DISTINCT usage errors
-- these four are expected to FAIL: ORDER BY expressions must appear in
-- the DISTINCT argument list.
--Testcase 130:
insert into multi_arg_agg values (1,1,'foo');
--Testcase 131:
select aggfns(distinct a,b,c order by i) from multi_arg_agg, generate_series(1,2) i;
--Testcase 132:
select aggfns(distinct a,b,c order by a,b+1) from multi_arg_agg, generate_series(1,2) i;
--Testcase 133:
select aggfns(distinct a,b,c order by a,b,i,c) from multi_arg_agg, generate_series(1,2) i;
--Testcase 134:
select aggfns(distinct a,a,c order by a,b) from multi_arg_agg, generate_series(1,2) i;
-- string_agg tests
-- NULL elements are skipped by string_agg; the all-NULL case yields NULL.
begin;
--Testcase 135:
delete from varchar_tbl;
--Testcase 136:
insert into varchar_tbl values ('aaaa'),('bbbb'),('cccc');
--Testcase 137:
select string_agg(f1,',') from varchar_tbl;
--Testcase 138:
delete from varchar_tbl;
--Testcase 139:
insert into varchar_tbl values ('aaaa'),(null),('bbbb'),('cccc');
--Testcase 140:
select string_agg(f1,',') from varchar_tbl;
--Testcase 141:
delete from varchar_tbl;
--Testcase 142:
insert into varchar_tbl values (null),(null),('bbbb'),('cccc');
--Testcase 143:
select string_agg(f1,'AB') from varchar_tbl;
--Testcase 144:
delete from varchar_tbl;
--Testcase 145:
insert into varchar_tbl values (null),(null);
--Testcase 146:
select string_agg(f1,',') from varchar_tbl;
rollback;
-- check some implicit casting cases, as per bug #5564
-- mixing varchar and text between the DISTINCT argument and the ORDER BY
-- expression must be rejected (the "not ok" cases error out).
--Testcase 147:
select string_agg(distinct f1, ',' order by f1) from varchar_tbl; -- ok
--Testcase 148:
select string_agg(distinct f1::text, ',' order by f1) from varchar_tbl; -- not ok
--Testcase 149:
select string_agg(distinct f1, ',' order by f1::text) from varchar_tbl; -- not ok
--Testcase 150:
select string_agg(distinct f1::text, ',' order by f1::text) from varchar_tbl; -- ok
-- string_agg bytea tests
-- exercises the bytea variant: empty input, NULL delimiter, and a
-- non-trivial bytea delimiter built with decode().
--Testcase 446:
create foreign table bytea_test_table(v bytea) server sqlite_svr;
--Testcase 151:
select string_agg(v, '') from bytea_test_table;
--Testcase 152:
insert into bytea_test_table values(decode('ff','hex'));
--Testcase 153:
select string_agg(v, '') from bytea_test_table;
--Testcase 154:
insert into bytea_test_table values(decode('aa','hex'));
--Testcase 155:
select string_agg(v, '') from bytea_test_table;
--Testcase 156:
select string_agg(v, NULL) from bytea_test_table;
--Testcase 157:
select string_agg(v, decode('ee', 'hex')) from bytea_test_table;
--Testcase 447:
drop foreign table bytea_test_table;
-- FILTER tests
--Testcase 158:
select min(unique1) filter (where unique1 > 100) from tenk1;
--Testcase 159:
select sum(1/ten) filter (where ten > 0) from tenk1;
--Testcase 160:
select ten, sum(distinct four) filter (where four::text ~ '123') from onek a
group by ten;
--Testcase 161:
select ten, sum(distinct four) filter (where four > 10) from onek a
group by ten
having exists (select 1 from onek b where sum(distinct a.four) = b.four);
--Testcase 448:
create foreign table agg_t17(foo text, bar text) server sqlite_svr;
--Testcase 449:
insert into agg_t17 values ('a', 'b');
--Testcase 450:
select max(foo COLLATE "C") filter (where (bar collate "POSIX") > '0')
from agg_t17;
-- outer reference in FILTER (PostgreSQL extension)
--Testcase 451:
create foreign table agg_t18 (inner_c int) server sqlite_svr;
--Testcase 452:
create foreign table agg_t19 (outer_c int) server sqlite_svr;
--Testcase 453:
insert into agg_t18 values (1);
--Testcase 454:
insert into agg_t19 values (2), (3);
--Testcase 455:
select (select count(*)
from agg_t18) from agg_t19; -- inner query is aggregation query
--Testcase 456:
select (select count(*) filter (where outer_c <> 0)
from agg_t18) from agg_t19; -- outer query is aggregation query
--Testcase 457:
select (select count(inner_c) filter (where outer_c <> 0)
from agg_t18) from agg_t19; -- inner query is aggregation query
--Testcase 162:
select
(select max((select i.unique2 from tenk1 i where i.unique1 = o.unique1))
filter (where o.unique1 < 10))
from tenk1 o; -- outer query is aggregation query
-- subquery in FILTER clause (PostgreSQL extension)
--Testcase 163:
select sum(unique1) FILTER (WHERE
unique1 IN (SELECT unique1 FROM onek where unique1 < 100)) FROM tenk1;
-- exercise lots of aggregate parts with FILTER
begin;
--Testcase 164:
delete from multi_arg_agg;
--Testcase 165:
insert into multi_arg_agg values (1,3,'foo'),(0,null,null),(2,2,'bar'),(3,1,'baz');
--Testcase 166:
select aggfns(distinct a,b,c order by a,c using ~<~,b) filter (where a > 1) from multi_arg_agg, generate_series(1,2) i;
rollback;
-- ============================================================================
-- Ordered-set aggregate tests (percentile_cont/_disc, rank, cume_dist, mode,
-- hypothetical-set aggregates). Each fixture mutation is wrapped in
-- begin;...rollback; so the shared test tables are left unchanged.
-- Statements marked "-- error" are expected to fail; the error text is part
-- of the expected output, so do not rewrite them.
-- ============================================================================
-- ordered-set aggregates
begin;
--Testcase 167:
delete from FLOAT8_TBL;
--Testcase 168:
insert into FLOAT8_TBL values (0::float8),(0.1),(0.25),(0.4),(0.5),(0.6),(0.75),(0.9),(1);
--Testcase 169:
select f1, percentile_cont(f1) within group (order by x::float8)
from generate_series(1,5) x,
FLOAT8_TBL
group by f1 order by f1;
rollback;
begin;
--Testcase 170:
delete from FLOAT8_TBL;
--Testcase 171:
insert into FLOAT8_TBL values (0::float8),(0.1),(0.25),(0.4),(0.5),(0.6),(0.75),(0.9),(1);
--Testcase 172:
-- ORDER BY is not allowed in an OSA's direct-argument list.
select f1, percentile_cont(f1 order by f1) within group (order by x) -- error
from generate_series(1,5) x,
FLOAT8_TBL
group by f1 order by f1;
rollback;
begin;
--Testcase 173:
delete from FLOAT8_TBL;
--Testcase 174:
insert into FLOAT8_TBL values (0::float8),(0.1),(0.25),(0.4),(0.5),(0.6),(0.75),(0.9),(1);
--Testcase 175:
-- sum() is not an ordered-set aggregate.
select f1, sum() within group (order by x::float8) -- error
from generate_series(1,5) x,
FLOAT8_TBL
group by f1 order by f1;
rollback;
begin;
--Testcase 176:
delete from FLOAT8_TBL;
--Testcase 177:
insert into FLOAT8_TBL values (0::float8),(0.1),(0.25),(0.4),(0.5),(0.6),(0.75),(0.9),(1);
--Testcase 178:
-- percentile_cont requires a WITHIN GROUP clause.
select f1, percentile_cont(f1,f1) -- error
from generate_series(1,5) x,
FLOAT8_TBL
group by f1 order by f1;
rollback;
--Testcase 179:
-- Round the result to limited digits to avoid platform-specific results.
select (percentile_cont(0.5) within group (order by b))::numeric(20,10) from aggtest;
--Testcase 180:
-- Round the result to limited digits to avoid platform-specific results.
select (percentile_cont(0.5) within group (order by b))::numeric(20,10), sum(b)::numeric(10,3) from aggtest;
--Testcase 181:
-- Round the result to limited digits to avoid platform-specific results.
select percentile_cont(0.5) within group (order by thousand) from tenk1;
--Testcase 182:
select percentile_disc(0.5) within group (order by thousand) from tenk1;
begin;
--Testcase 183:
delete from INT4_TBL;
--Testcase 184:
insert into INT4_TBL values (1),(1),(2),(2),(3),(3),(4);
--Testcase 185:
select rank(3) within group (order by f1) from INT4_TBL;
--Testcase 186:
select cume_dist(3) within group (order by f1) from INT4_TBL;
--Testcase 187:
insert into INT4_TBL values (5);
--Testcase 458:
-- Round the result to limited digits to avoid platform-specific results.
select (percent_rank(3) within group (order by f1))::numeric(20,10) from INT4_TBL;
--Testcase 459:
delete from INT4_TBL where f1 = 5;
--Testcase 188:
select dense_rank(3) within group (order by f1) from INT4_TBL;
rollback;
--Testcase 189:
-- array-valued direct argument returns one percentile per array element.
select percentile_disc(array[0,0.1,0.25,0.5,0.75,0.9,1]) within group (order by thousand)
from tenk1;
--Testcase 190:
select percentile_cont(array[0,0.25,0.5,0.75,1]) within group (order by thousand)
from tenk1;
--Testcase 191:
-- multi-dimensional direct arg, including NULL percentile positions.
select percentile_disc(array[[null,1,0.5],[0.75,0.25,null]]) within group (order by thousand)
from tenk1;
--Testcase 460:
create foreign table agg_t21 (x int) server sqlite_svr;
begin;
--Testcase 248:
insert into agg_t21 select * from generate_series(1,6);
--Testcase 249:
-- duplicate percentile values (1 appears twice) must be handled.
select percentile_cont(array[0,1,0.25,0.75,0.5,1,0.3,0.32,0.35,0.38,0.4]) within group (order by x)
from agg_t21;
rollback;
--Testcase 192:
select ten, mode() within group (order by string4) from tenk1 group by ten;
--Testcase 461:
create foreign table agg_t20 (x text) server sqlite_svr;
begin;
--Testcase 462:
insert into agg_t20 values (unnest('{fred,jim,fred,jack,jill,fred,jill,jim,jim,sheila,jim,sheila}'::text[]));
--Testcase 463:
select percentile_disc(array[0.25,0.5,0.75]) within group (order by x) from agg_t20;
rollback;
-- check collation propagates up in suitable cases:
begin;
--Testcase 464:
insert into agg_t20 values ('fred'), ('jim');
--Testcase 465:
select pg_collation_for(percentile_disc(1) within group (order by x collate "POSIX")) from agg_t20;
rollback;
-- ordered-set aggs created with CREATE AGGREGATE
--Testcase 466:
create aggregate my_percentile_disc(float8 ORDER BY anyelement) (
stype = internal,
sfunc = ordered_set_transition,
finalfunc = percentile_disc_final,
finalfunc_extra = true,
finalfunc_modify = read_write
);
--Testcase 467:
create aggregate my_rank(VARIADIC "any" ORDER BY VARIADIC "any") (
stype = internal,
sfunc = ordered_set_transition_multi,
finalfunc = rank_final,
finalfunc_extra = true,
hypothetical
);
-- exercise ALTER AGGREGATE ... RENAME on ordered-set signatures.
alter aggregate my_percentile_disc(float8 ORDER BY anyelement)
rename to test_percentile_disc;
alter aggregate my_rank(VARIADIC "any" ORDER BY VARIADIC "any")
rename to test_rank;
begin;
--Testcase 468:
insert into agg_t21 values (1),(1),(2),(2),(3),(3),(4);
--Testcase 469:
select test_rank(3) within group (order by x) from agg_t21;
rollback;
--Testcase 193:
select test_percentile_disc(0.5) within group (order by thousand) from tenk1;
-- ordered-set aggs can't use ungrouped vars in direct args:
begin;
--Testcase 470:
insert into agg_t21 select * from generate_series(1,5);
--Testcase 471:
select rank(x) within group (order by x) from agg_t21;
rollback;
-- outer-level agg can't use a grouped arg of a lower level, either:
begin;
--Testcase 472:
insert into agg_t21 select * from generate_series(1,5);
--Testcase 473:
select array(select percentile_disc(a) within group (order by x)
from (values (0.3),(0.7)) v(a) group by a)
from agg_t21;
rollback;
-- agg in the direct args is a grouping violation, too:
begin;
--Testcase 474:
insert into agg_t21 select * from generate_series(1,5);
--Testcase 475:
select rank(sum(x)) within group (order by x) from agg_t21;
rollback;
-- hypothetical-set type unification and argument-count failures:
begin;
--Testcase 264:
insert into agg_t20 values ('fred'), ('jim');
--Testcase 265:
select rank(3) within group (order by x) from agg_t20;
rollback;
--Testcase 194:
select rank(3) within group (order by stringu1,stringu2) from tenk1;
begin;
--Testcase 476:
insert into agg_t21 select * from generate_series(1,5);
--Testcase 477:
select rank('fred') within group (order by x) from agg_t21;
rollback;
begin;
--Testcase 478:
insert into agg_t20 values ('fred'), ('jim');
--Testcase 479:
select rank('adam'::text collate "C") within group (order by x collate "POSIX")
from agg_t20;
rollback;
-- hypothetical-set type unification successes:
begin;
--Testcase 480:
insert into agg_t20 values ('fred'), ('jim');
--Testcase 481:
select rank('adam'::varchar) within group (order by x) from agg_t20;
rollback;
begin;
--Testcase 482:
insert into agg_t21 select * from generate_series(1,5);
--Testcase 483:
select rank('3') within group (order by x) from agg_t21;
rollback;
-- divide by zero check
begin;
--Testcase 484:
-- generate_series(1,0) yields zero rows: percent_rank over an empty group.
insert into agg_t21 select * from generate_series(1,0);
--Testcase 485:
select percent_rank(0) within group (order by x) from agg_t21;
rollback;
-- deparse and multiple features:
--Testcase 486:
create view aggordview1 as
select ten,
percentile_disc(0.5) within group (order by thousand) as p50,
percentile_disc(0.5) within group (order by thousand) filter (where hundred=1) as px,
rank(5,'AZZZZ',50) within group (order by hundred, string4 desc, hundred)
from tenk1
group by ten order by ten;
--Testcase 196:
-- checks that ruleutils deparses WITHIN GROUP / FILTER correctly.
select pg_get_viewdef('aggordview1');
--Testcase 197:
select * from aggordview1 order by ten;
--Testcase 487:
drop view aggordview1;
-- ============================================================================
-- Variadic aggregates, then transition-state sharing tests. The plpgsql
-- transition functions RAISE NOTICE on every call, so the number of NOTICE
-- lines in the expected output reveals whether two aggregates shared one
-- transition state (one call per row) or kept separate states (two calls).
-- Everything from "begin work;" to "rollback;" is deliberately transient.
-- ============================================================================
-- variadic aggregates
--Testcase 488:
create function least_accum(anyelement, variadic anyarray)
returns anyelement language sql as
'select least($1, min($2[i])) from generate_subscripts($2,1) g(i)';
--Testcase 489:
create aggregate least_agg(variadic items anyarray) (
stype = anyelement, sfunc = least_accum
);
--Testcase 490:
-- same accumulator but using the anycompatible polymorphic family.
create function cleast_accum(anycompatible, variadic anycompatiblearray)
returns anycompatible language sql as
'select least($1, min($2[i])) from generate_subscripts($2,1) g(i)';
--Testcase 491:
create aggregate cleast_agg(variadic items anycompatiblearray) (
stype = anycompatible, sfunc = cleast_accum);
--Testcase 198:
select least_agg(q1,q2) from int8_tbl;
--Testcase 199:
select least_agg(variadic array[q1,q2]) from int8_tbl;
--Testcase 492:
select cleast_agg(q1,q2) from int8_tbl;
--Testcase 493:
-- anycompatible unifies numeric literal 4.5 with int4 column f1.
select cleast_agg(4.5,f1) from int4_tbl;
--Testcase 494:
select cleast_agg(variadic array[4.5,f1]) from int4_tbl;
--Testcase 495:
select pg_typeof(cleast_agg(variadic array[4.5,f1])) from int4_tbl;
-- test aggregates with common transition functions share the same states
--Testcase 496:
create foreign table agg_t10(one int, id int options (key 'true')) server sqlite_svr;
--Testcase 497:
create foreign table agg_t11(one int, two int, id int options (key 'true')) server sqlite_svr;
--Testcase 498:
create foreign table agg_t12(a int, id int options (key 'true')) server sqlite_svr;
begin work;
--Testcase 499:
create type avg_state as (total bigint, count bigint);
--Testcase 500:
-- transition fn: accumulates (total, count); NOTICE makes each call visible.
create or replace function avg_transfn(state avg_state, n int) returns avg_state as
$$
declare new_state avg_state;
begin
raise notice 'avg_transfn called with %', n;
if state is null then
if n is not null then
new_state.total := n;
new_state.count := 1;
return new_state;
end if;
return null;
elsif n is not null then
state.total := state.total + n;
state.count := state.count + 1;
return state;
end if;
return null;
end
$$ language plpgsql;
--Testcase 501:
-- final fn variant 1: integer average (total / count).
create function avg_finalfn(state avg_state) returns int4 as
$$
begin
if state is null then
return NULL;
else
return state.total / state.count;
end if;
end
$$ language plpgsql;
--Testcase 502:
-- final fn variant 2: plain total; shares avg_transfn with avg_finalfn.
create function sum_finalfn(state avg_state) returns int4 as
$$
begin
if state is null then
return NULL;
else
return state.total;
end if;
end
$$ language plpgsql;
--Testcase 503:
create aggregate my_avg(int4)
(
stype = avg_state,
sfunc = avg_transfn,
finalfunc = avg_finalfn
);
--Testcase 504:
create aggregate my_sum(int4)
(
stype = avg_state,
sfunc = avg_transfn,
finalfunc = sum_finalfn
);
-- aggregate state should be shared as aggs are the same.
--Testcase 505:
delete from agg_t10;
--Testcase 506:
insert into agg_t10 values (1), (3);
--Testcase 507:
select my_avg(one),my_avg(one) from agg_t10;
-- aggregate state should be shared as transfn is the same for both aggs.
--Testcase 508:
select my_avg(one),my_sum(one) from agg_t10;
-- same as previous one, but with DISTINCT, which requires sorting the input.
--Testcase 509:
delete from agg_t10;
--Testcase 510:
insert into agg_t10 values (1), (3), (1);
--Testcase 511:
select my_avg(distinct one),my_sum(distinct one) from agg_t10;
-- shouldn't share states due to the distinctness not matching.
--Testcase 512:
delete from agg_t10;
--Testcase 513:
insert into agg_t10 values (1), (3);
--Testcase 514:
select my_avg(distinct one),my_sum(one) from agg_t10;
-- shouldn't share states due to the filter clause not matching.
--Testcase 515:
select my_avg(one) filter (where one > 1),my_sum(one) from agg_t10;
-- this should not share the state due to different input columns.
--Testcase 516:
delete from agg_t11;
--Testcase 517:
insert into agg_t11 values (1,2),(3,4);
--Testcase 518:
select my_avg(one),my_sum(two) from agg_t11;
-- exercise cases where OSAs share state
--Testcase 519:
delete from agg_t12;
--Testcase 520:
insert into agg_t12 values (1), (3), (5), (7);
--Testcase 521:
select
percentile_cont(0.5) within group (order by a),
percentile_disc(0.5) within group (order by a)
from agg_t12;
--Testcase 522:
select
percentile_cont(0.25) within group (order by a),
percentile_disc(0.5) within group (order by a)
from agg_t12;
-- these can't share state currently
--Testcase 523:
select
rank(4) within group (order by a),
dense_rank(4) within group (order by a)
from agg_t12;
-- test that aggs with the same sfunc and initcond share the same agg state
--Testcase 524:
create aggregate my_sum_init(int4)
(
stype = avg_state,
sfunc = avg_transfn,
finalfunc = sum_finalfn,
initcond = '(10,0)'
);
--Testcase 525:
create aggregate my_avg_init(int4)
(
stype = avg_state,
sfunc = avg_transfn,
finalfunc = avg_finalfn,
initcond = '(10,0)'
);
--Testcase 526:
-- same sfunc/finalfunc as my_avg_init but a different initcond on purpose.
create aggregate my_avg_init2(int4)
(
stype = avg_state,
sfunc = avg_transfn,
finalfunc = avg_finalfn,
initcond = '(4,0)'
);
-- state should be shared if INITCONDs are matching
--Testcase 527:
delete from agg_t10;
--Testcase 528:
insert into agg_t10 values (1), (3);
--Testcase 529:
select my_sum_init(one),my_avg_init(one) from agg_t10;
-- Varying INITCONDs should cause the states not to be shared.
--Testcase 530:
select my_sum_init(one),my_avg_init2(one) from agg_t10;
rollback;
-- ============================================================================
-- More state-sharing coverage (finalfn present vs. absent), then tests for
-- transition/combine functions that return NULL, then parallel combine/serial
-- coverage. All fixture work stays inside explicit transactions + ROLLBACK.
-- ============================================================================
-- test aggregate state sharing to ensure it works if one aggregate has a
-- finalfn and the other one has none.
begin work;
--Testcase 531:
-- int4 running-sum transition fn; NOTICE per call exposes state sharing.
create or replace function sum_transfn(state int4, n int4) returns int4 as
$$
declare new_state int4;
begin
raise notice 'sum_transfn called with %', n;
if state is null then
if n is not null then
new_state := n;
return new_state;
end if;
return null;
elsif n is not null then
state := state + n;
return state;
end if;
return null;
end
$$ language plpgsql;
--Testcase 532:
create function halfsum_finalfn(state int4) returns int4 as
$$
begin
if state is null then
return NULL;
else
return state / 2;
end if;
end
$$ language plpgsql;
--Testcase 533:
-- my_sum has no finalfunc; my_half_sum adds one on the same sfunc.
create aggregate my_sum(int4)
(
stype = int4,
sfunc = sum_transfn
);
--Testcase 534:
create aggregate my_half_sum(int4)
(
stype = int4,
sfunc = sum_transfn,
finalfunc = halfsum_finalfn
);
-- Agg state should be shared even though my_sum has no finalfn
--Testcase 535:
delete from agg_t10;
--Testcase 536:
insert into agg_t10 values (1), (2), (3), (4);
--Testcase 537:
select my_sum(one),my_half_sum(one) from agg_t10;
rollback;
-- test that the aggregate transition logic correctly handles
-- transition / combine functions returning NULL
-- First test the case of a normal transition function returning NULL
BEGIN;
--Testcase 538:
-- STRICT + RETURN NULL: once the state goes NULL, a strict transfn must not
-- be called again with that NULL state; the RAISE guards against regression.
CREATE FUNCTION balkifnull(int8, int4)
RETURNS int8
STRICT
LANGUAGE plpgsql AS $$
BEGIN
IF $1 IS NULL THEN
RAISE 'erroneously called with NULL argument';
END IF;
RETURN NULL;
END$$;
--Testcase 539:
CREATE AGGREGATE balk(int4)
(
SFUNC = balkifnull(int8, int4),
STYPE = int8,
PARALLEL = SAFE,
INITCOND = '0'
);
--Testcase 200:
SELECT balk(hundred) FROM tenk1;
ROLLBACK;
-- Secondly test the case of a parallel aggregate combiner function
-- returning NULL. For that use normal transition function, but a
-- combiner function returning NULL.
BEGIN ISOLATION LEVEL REPEATABLE READ;
--Testcase 540:
CREATE FUNCTION balkifnull(int8, int8)
RETURNS int8
PARALLEL SAFE
STRICT
LANGUAGE plpgsql AS $$
BEGIN
IF $1 IS NULL THEN
RAISE 'erroneously called with NULL argument';
END IF;
RETURN NULL;
END$$;
--Testcase 541:
CREATE AGGREGATE balk(int4)
(
SFUNC = int4_sum(int8, int4),
STYPE = int8,
COMBINEFUNC = balkifnull(int8, int8),
PARALLEL = SAFE,
INITCOND = '0'
);
-- force use of parallelism
-- Skip this test, cannot alter foreign table tenk1
-- ALTER FOREIGN TABLE tenk1 set (parallel_workers = 4);
-- SET LOCAL parallel_setup_cost=0;
-- SET LOCAL max_parallel_workers_per_gather=4;
-- EXPLAIN (COSTS OFF) SELECT balk(hundred) FROM tenk1;
-- SELECT balk(hundred) FROM tenk1;
ROLLBACK;
-- test coverage for aggregate combine/serial/deserial functions
BEGIN ISOLATION LEVEL REPEATABLE READ;
-- settings below coax the planner into a parallel plan for the UNION ALLs.
SET parallel_setup_cost = 0;
SET parallel_tuple_cost = 0;
SET min_parallel_table_scan_size = 0;
SET max_parallel_workers_per_gather = 4;
SET parallel_leader_participation = off;
SET enable_indexonlyscan = off;
-- variance(int4) covers numeric_poly_combine
-- sum(int8) covers int8_avg_combine
-- regr_count(float8, float8) covers int8inc_float8_float8 and aggregates with > 1 arg
--Testcase 542:
EXPLAIN (COSTS OFF, VERBOSE)
SELECT variance(unique1::int4), sum(unique1::int8), regr_count(unique1::float8, unique1::float8)
FROM (SELECT * FROM tenk1
UNION ALL SELECT * FROM tenk1
UNION ALL SELECT * FROM tenk1
UNION ALL SELECT * FROM tenk1) u;
--Testcase 543:
SELECT variance(unique1::int4), sum(unique1::int8), regr_count(unique1::float8, unique1::float8)
FROM (SELECT * FROM tenk1
UNION ALL SELECT * FROM tenk1
UNION ALL SELECT * FROM tenk1
UNION ALL SELECT * FROM tenk1) u;
-- variance(int8) covers numeric_combine
-- avg(numeric) covers numeric_avg_combine
--Testcase 544:
EXPLAIN (COSTS OFF, VERBOSE)
SELECT variance(unique1::int8), avg(unique1::numeric)
FROM (SELECT * FROM tenk1
UNION ALL SELECT * FROM tenk1
UNION ALL SELECT * FROM tenk1
UNION ALL SELECT * FROM tenk1) u;
--Testcase 545:
SELECT variance(unique1::int8), avg(unique1::numeric)
FROM (SELECT * FROM tenk1
UNION ALL SELECT * FROM tenk1
UNION ALL SELECT * FROM tenk1
UNION ALL SELECT * FROM tenk1) u;
ROLLBACK;
-- ============================================================================
-- dense_rank coverage, STRICT/ORDER BY NULL handling, collation-sensitive
-- grouping, and hash-aggregation spill tests. The spill section runs the same
-- aggregation once with sorting and once with hashing (work_mem forced to
-- 64kB so both spill) and then diffs the two result sets with EXCEPT; the
-- comparison queries are expected to return zero rows.
-- Ends with teardown of all foreign tables, the server, and the extension.
-- ============================================================================
-- test coverage for dense_rank
--Testcase 546:
create foreign table agg_t13(x int, id int options (key 'true')) server sqlite_svr;
--Testcase 547:
insert into agg_t13 values (1),(1),(2),(2),(3),(3);
--Testcase 548:
SELECT dense_rank(x) WITHIN GROUP (ORDER BY x) FROM agg_t13 GROUP BY (x) ORDER BY 1;
--Testcase 549:
delete from agg_t13;
-- Ensure that the STRICT checks for aggregates does not take NULLness
-- of ORDER BY columns into account. See bug report around
-- 2a505161-2727-2473-7c46-591ed108ac52@email.cz
--Testcase 550:
create foreign table agg_t14(x int, y int, id int options (key 'true')) server sqlite_svr;
--Testcase 551:
insert into agg_t14 values (1, NULL), (1, 2);
--Testcase 552:
SELECT min(x ORDER BY y) FROM agg_t14;
--Testcase 553:
-- NOTE(review): identical to Testcase 552; the upstream test used two
-- different VALUES lists here — presumably kept duplicated when porting.
SELECT min(x ORDER BY y) FROM agg_t14;
-- check collation-sensitive matching between grouping expressions
begin;
--Testcase 554:
insert into agg_t20 values (unnest(array['a','b']));
--Testcase 555:
select x||'a', case x||'a' when 'aa' then 1 else 0 end, count(*)
from agg_t20 group by x||'a' order by 1;
rollback;
begin;
--Testcase 556:
insert into agg_t20 values (unnest(array['a','b']));
--Testcase 557:
select x||'a', case when x||'a' = 'aa' then 1 else 0 end, count(*)
from agg_t20 group by x||'a' order by 1;
rollback;
-- Make sure that generation of HashAggregate for uniqification purposes
-- does not lead to array overflow due to unexpected duplicate hash keys
-- see CAFeeJoKKu0u+A_A9R9316djW-YW3-+Gtgvy3ju655qRHR3jtdA@mail.gmail.com
--Testcase 558:
explain (costs off)
select 1 from tenk1
where (hundred, thousand) in (select twothousand, twothousand from onek);
--
-- Hash Aggregation Spill tests
--
set enable_sort=false;
set work_mem='64kB';
--Testcase 559:
select unique1, count(*), sum(twothousand) from tenk1
group by unique1
having sum(fivethous) > 4975
order by sum(twothousand);
set work_mem to default;
set enable_sort to default;
--
-- Compare results between plans using sorting and plans using hash
-- aggregation. Force spilling in both cases by setting work_mem low.
--
set work_mem='64kB';
-- agg_group_* receive the sort-based results, agg_hash_* the hash-based ones.
--Testcase 560:
create foreign table agg_data_2k(g int, id int options (key 'true')) server sqlite_svr;
--Testcase 561:
create foreign table agg_data_20k(g int, id int options (key 'true')) server sqlite_svr;
--Testcase 562:
create foreign table agg_group_1(c1 int, c2 numeric, c3 int) server sqlite_svr;
--Testcase 563:
create foreign table agg_group_2(a int, c1 numeric, c2 text, c3 int) server sqlite_svr;
--Testcase 564:
create foreign table agg_group_3(c1 numeric, c2 int4, c3 int) server sqlite_svr;
--Testcase 565:
create foreign table agg_group_4(c1 numeric, c2 text, c3 int) server sqlite_svr;
--Testcase 566:
create foreign table agg_hash_1(c1 int, c2 numeric, c3 int) server sqlite_svr;
--Testcase 567:
create foreign table agg_hash_2(a int, c1 numeric, c2 text, c3 int) server sqlite_svr;
--Testcase 568:
create foreign table agg_hash_3(c1 numeric, c2 int4, c3 int) server sqlite_svr;
--Testcase 569:
create foreign table agg_hash_4(c1 numeric, c2 text, c3 int) server sqlite_svr;
--Testcase 570:
insert into agg_data_2k select g from generate_series(0, 1999) g;
--analyze agg_data_2k;
--Testcase 571:
insert into agg_data_20k select g from generate_series(0, 19999) g;
--analyze agg_data_20k;
-- Produce results with sorting.
set enable_hashagg = false;
set jit_above_cost = 0;
--Testcase 572:
explain (costs off)
select g%10000 as c1, sum(g::numeric) as c2, count(*) as c3
from agg_data_20k group by g%10000;
--Testcase 573:
insert into agg_group_1
select g%10000 as c1, sum(g::numeric) as c2, count(*) as c3
from agg_data_20k group by g%10000;
--Testcase 574:
insert into agg_group_2
select * from
(values (100), (300), (500)) as r(a),
lateral (
select (g/2)::numeric as c1,
array_agg(g::numeric) as c2,
count(*) as c3
from agg_data_2k
where g < r.a
group by g/2) as s;
set jit_above_cost to default;
--Testcase 575:
insert into agg_group_3
select (g/2)::numeric as c1, sum(7::int4) as c2, count(*) as c3
from agg_data_2k group by g/2;
--Testcase 576:
insert into agg_group_4
select (g/2)::numeric as c1, array_agg(g::numeric) as c2, count(*) as c3
from agg_data_2k group by g/2;
-- Produce results with hash aggregation
set enable_hashagg = true;
set enable_sort = false;
set jit_above_cost = 0;
--Testcase 577:
explain (costs off)
select g%10000 as c1, sum(g::numeric) as c2, count(*) as c3
from agg_data_20k group by g%10000;
--Testcase 578:
insert into agg_hash_1
select g%10000 as c1, sum(g::numeric) as c2, count(*) as c3
from agg_data_20k group by g%10000;
--Testcase 579:
insert into agg_hash_2
select * from
(values (100), (300), (500)) as r(a),
lateral (
select (g/2)::numeric as c1,
array_agg(g::numeric) as c2,
count(*) as c3
from agg_data_2k
where g < r.a
group by g/2) as s;
set jit_above_cost to default;
--Testcase 580:
insert into agg_hash_3
select (g/2)::numeric as c1, sum(7::int4) as c2, count(*) as c3
from agg_data_2k group by g/2;
--Testcase 581:
insert into agg_hash_4
select (g/2)::numeric as c1, array_agg(g::numeric) as c2, count(*) as c3
from agg_data_2k group by g/2;
set enable_sort = true;
set work_mem to default;
-- Compare group aggregation results to hash aggregation results
-- (symmetric difference via EXCEPT; zero rows expected from each query).
--Testcase 582:
(select * from agg_hash_1 except select * from agg_group_1)
union all
(select * from agg_group_1 except select * from agg_hash_1);
--Testcase 583:
(select * from agg_hash_2 except select * from agg_group_2)
union all
(select * from agg_group_2 except select * from agg_hash_2);
--Testcase 584:
(select * from agg_hash_3 except select * from agg_group_3)
union all
(select * from agg_group_3 except select * from agg_hash_3);
--Testcase 585:
(select * from agg_hash_4 except select * from agg_group_4)
union all
(select * from agg_group_4 except select * from agg_hash_4);
-- Clean up
-- drop every remaining foreign table dynamically, then the server/extension.
DO $d$
declare
l_rec record;
begin
for l_rec in (select foreign_table_schema, foreign_table_name
from information_schema.foreign_tables) loop
execute format('drop foreign table %I.%I cascade;', l_rec.foreign_table_schema, l_rec.foreign_table_name);
end loop;
end;
$d$;
--Testcase 586:
DROP SERVER sqlite_svr CASCADE;
DROP EXTENSION duckdb_fdw CASCADE; | the_stack |
-- 2019-12-13T11:43:49.193Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Column SET IsSelectionColumn='Y', SelectionColumnSeqNo=50,Updated=TO_TIMESTAMP('2019-12-13 12:43:49','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Column_ID=551732
;
-- 2019-12-13T11:43:49.201Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Column SET IsSelectionColumn='Y', SelectionColumnSeqNo=60,Updated=TO_TIMESTAMP('2019-12-13 12:43:49','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Column_ID=551718
;
-- 2019-12-13T11:44:07.928Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Column SET IsSelectionColumn='Y', SelectionColumnSeqNo=50,Updated=TO_TIMESTAMP('2019-12-13 12:44:07','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Column_ID=551752
;
-- 2019-12-13T11:44:07.942Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Column SET IsSelectionColumn='Y', SelectionColumnSeqNo=60,Updated=TO_TIMESTAMP('2019-12-13 12:44:07','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Column_ID=551732
;
-- 2019-12-13T11:44:07.948Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Column SET IsSelectionColumn='Y', SelectionColumnSeqNo=70,Updated=TO_TIMESTAMP('2019-12-13 12:44:07','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Column_ID=551718
;
-- 2019-12-13T11:47:33.230Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM EXP_FormatLine WHERE EXP_FormatLine_ID=550251
;
-- 2019-12-13T11:47:41.273Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM EXP_FormatLine WHERE EXP_FormatLine_ID=550252
;
-- 2019-12-13T11:47:43.947Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM EXP_FormatLine WHERE EXP_FormatLine_ID=550253
;
-- 2019-12-13T11:47:48.081Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM EXP_FormatLine WHERE EXP_FormatLine_ID=550254
;
-- 2019-12-13T11:48:17.594Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Element_Link WHERE AD_Field_ID=589596
;
-- 2019-12-13T11:48:17.597Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Field_Trl WHERE AD_Field_ID=589596
;
-- 2019-12-13T11:48:17.602Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Field WHERE AD_Field_ID=589596
;
-- 2019-12-13T11:48:22.793Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Element_Link WHERE AD_Field_ID=589597
;
-- 2019-12-13T11:48:22.794Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Field_Trl WHERE AD_Field_ID=589597
;
-- 2019-12-13T11:48:22.798Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Field WHERE AD_Field_ID=589597
;
-- 2019-12-13T11:48:26.335Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_UI_Element WHERE AD_UI_Element_ID=563224
;
-- 2019-12-13T11:48:26.337Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Element_Link WHERE AD_Field_ID=589553
;
-- 2019-12-13T11:48:26.338Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Field_Trl WHERE AD_Field_ID=589553
;
-- 2019-12-13T11:48:26.343Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Field WHERE AD_Field_ID=589553
;
-- 2019-12-13T11:48:29.034Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_UI_Element WHERE AD_UI_Element_ID=563223
;
-- 2019-12-13T11:48:29.035Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Element_Link WHERE AD_Field_ID=589554
;
-- 2019-12-13T11:48:29.037Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Field_Trl WHERE AD_Field_ID=589554
;
-- 2019-12-13T11:48:29.042Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Field WHERE AD_Field_ID=589554
;
-- 2019-12-13T11:48:32.214Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_UI_Element WHERE AD_UI_Element_ID=564071
;
-- 2019-12-13T11:48:32.215Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Element_Link WHERE AD_Field_ID=591932
;
-- 2019-12-13T11:48:32.218Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Field_Trl WHERE AD_Field_ID=591932
;
-- 2019-12-13T11:48:32.222Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Field WHERE AD_Field_ID=591932
;
-- 2019-12-13T11:48:35.252Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_UI_Element WHERE AD_UI_Element_ID=549245
;
-- 2019-12-13T11:48:35.255Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Element_Link WHERE AD_Field_ID=555656
;
-- 2019-12-13T11:48:35.256Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Field_Trl WHERE AD_Field_ID=555656
;
-- 2019-12-13T11:48:35.259Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Field WHERE AD_Field_ID=555656
;
-- 2019-12-13T11:48:39.042Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_UI_Element WHERE AD_UI_Element_ID=549244
;
-- 2019-12-13T11:48:39.044Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Element_Link WHERE AD_Field_ID=555462
;
-- 2019-12-13T11:48:39.045Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Field_Trl WHERE AD_Field_ID=555462
;
-- 2019-12-13T11:48:39.048Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Field WHERE AD_Field_ID=555462
;
-- 2019-12-13T11:48:42.095Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_UI_Element WHERE AD_UI_Element_ID=549242
;
-- 2019-12-13T11:48:42.097Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Element_Link WHERE AD_Field_ID=555771
;
-- 2019-12-13T11:48:42.099Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Field_Trl WHERE AD_Field_ID=555771
;
-- 2019-12-13T11:48:42.103Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Field WHERE AD_Field_ID=555771
;
-- 2019-12-13T11:48:44.789Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_UI_Element WHERE AD_UI_Element_ID=549243
;
-- 2019-12-13T11:48:44.790Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Element_Link WHERE AD_Field_ID=555465
;
-- 2019-12-13T11:48:44.791Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Field_Trl WHERE AD_Field_ID=555465
;
-- 2019-12-13T11:48:44.794Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Field WHERE AD_Field_ID=555465
;
-- 2019-12-13T11:48:48.367Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_UI_Element WHERE AD_UI_Element_ID=549239
;
-- 2019-12-13T11:48:48.369Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Element_Link WHERE AD_Field_ID=555773
;
-- 2019-12-13T11:48:48.370Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Field_Trl WHERE AD_Field_ID=555773
;
-- 2019-12-13T11:48:48.373Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Field WHERE AD_Field_ID=555773
;
-- 2019-12-13T11:48:58.414Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Column_Trl WHERE AD_Column_ID=569179
;
-- 2019-12-13T11:48:58.418Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Column WHERE AD_Column_ID=569179
;
-- 2019-12-13T11:49:01.634Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Column_Trl WHERE AD_Column_ID=569180
;
-- 2019-12-13T11:49:01.638Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Column WHERE AD_Column_ID=569180
;
ALTER Table EDI_DesadvLine DROP COLUMN M_HU_PackagingCode_TU_ID;
ALTER Table EDI_DesadvLine DROP COLUMN M_HU_PackagingCode_LU_ID;
ALTER Table EDI_DesadvLine DROP COLUMN BestBeforeDate;
-- 2019-12-13T11:50:05.985Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM EXP_FormatLine WHERE EXP_FormatLine_ID=550031
;
-- 2019-12-13T11:50:11.911Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM EXP_FormatLine WHERE EXP_FormatLine_ID=550033
;
-- 2019-12-13T11:50:16.157Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM EXP_FormatLine WHERE EXP_FormatLine_ID=550035
;
-- 2019-12-13T11:50:30.481Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Column_Trl WHERE AD_Column_ID=569178
;
-- 2019-12-13T11:50:30.489Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Column WHERE AD_Column_ID=569178
;
-- 2019-12-13T11:50:32.826Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Column_Trl WHERE AD_Column_ID=569177
;
-- 2019-12-13T11:50:32.829Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Column WHERE AD_Column_ID=569177
;
ALTER TABLE EDI_DesadvLine ALTER COLUMN IPA_SSCC18 DROP NOT NULL;
ALTER TABLE EDI_DesadvLine RENAME COLUMN IPA_SSCC18 TO IPA_SSCC18_bkp;
ALTER TABLE EDI_DesadvLine ALTER COLUMN IsManual_IPA_SSCC18 DROP NOT NULL;
ALTER TABLE EDI_DesadvLine RENAME COLUMN IsManual_IPA_SSCC18 TO IsManual_IPA_SSCC18_bkp;
ALTER TABLE EDI_DesadvLine ALTER COLUMN M_HU_ID DROP NOT NULL;
ALTER TABLE EDI_DesadvLine RENAME COLUMN M_HU_ID TO M_HU_ID_bkp;
ALTER TABLE EDI_DesadvLine ALTER COLUMN QtyDeliveredInUOM DROP NOT NULL;
ALTER TABLE EDI_DesadvLine RENAME COLUMN QtyDeliveredInUOM TO QtyDeliveredInUOM_bkp;
ALTER TABLE EDI_DesadvLine ALTER COLUMN QtyItemCapacity DROP NOT NULL;
ALTER TABLE EDI_DesadvLine RENAME COLUMN QtyItemCapacity TO QtyItemCapacity_bkp;
-- 2019-12-13T11:52:36.774Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Column_Trl WHERE AD_Column_ID=551897
;
-- 2019-12-13T11:52:36.779Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Column WHERE AD_Column_ID=551897
;
-- 2019-12-13T11:53:29.731Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Column_Trl WHERE AD_Column_ID=551755
;
-- 2019-12-13T11:53:29.735Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Column WHERE AD_Column_ID=551755
;
-- 2019-12-13T11:54:10.956Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM EXP_FormatLine WHERE EXP_FormatLine_ID=550257
;
-- 2019-12-13T11:54:19.116Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Column_Trl WHERE AD_Column_ID=569642
;
-- 2019-12-13T11:54:19.119Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Column WHERE AD_Column_ID=569642
;
-- 2019-12-13T11:56:51.595Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Column_Trl WHERE AD_Column_ID=551749
;
-- 2019-12-13T11:56:51.599Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Column WHERE AD_Column_ID=551749
;
-- 2019-12-13T11:58:05.861Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM EXP_FormatLine WHERE EXP_FormatLine_ID=550084
;
-- 2019-12-13T11:58:46.742Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Column_Trl WHERE AD_Column_ID=552032
;
-- 2019-12-13T11:58:46.746Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Column WHERE AD_Column_ID=552032
;
-- 2019-12-13T11:59:45.377Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM EXP_FormatLine WHERE EXP_FormatLine_ID=550085
;
-- 2019-12-13T11:59:48.692Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Column_Trl WHERE AD_Column_ID=552030
;
-- 2019-12-13T11:59:48.699Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
DELETE FROM AD_Column WHERE AD_Column_ID=552030
;
-- 2019-12-13T12:02:39.210Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Tab SET InternalName='EDI_DesadvLine_Pack',Updated=TO_TIMESTAMP('2019-12-13 13:02:39','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Tab_ID=542152
;
-- 2019-12-13T12:03:48.517Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_UI_Element (AD_Client_ID,AD_Field_ID,AD_Org_ID,AD_Tab_ID,AD_UI_ElementGroup_ID,AD_UI_Element_ID,AD_UI_ElementType,Created,CreatedBy,IsActive,IsAdvancedField,IsAllowFiltering,IsDisplayed,IsDisplayedGrid,IsDisplayed_SideList,IsMultiLine,MultiLine_LinesCount,Name,SeqNo,SeqNoGrid,SeqNo_SideList,Updated,UpdatedBy) VALUES (0,593415,0,542152,543218,564726,'F',TO_TIMESTAMP('2019-12-13 13:03:48','YYYY-MM-DD HH24:MI:SS'),100,'Y','N','N','Y','N','N','N',0,'M_HU_PackagingCode_TU_ID',95,0,0,TO_TIMESTAMP('2019-12-13 13:03:48','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 2019-12-13T12:04:34.939Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
INSERT INTO AD_UI_Element (AD_Client_ID,AD_Field_ID,AD_Org_ID,AD_Tab_ID,AD_UI_ElementGroup_ID,AD_UI_Element_ID,AD_UI_ElementType,Created,CreatedBy,IsActive,IsAdvancedField,IsAllowFiltering,IsDisplayed,IsDisplayedGrid,IsDisplayed_SideList,IsMultiLine,MultiLine_LinesCount,Name,SeqNo,SeqNoGrid,SeqNo_SideList,Updated,UpdatedBy) VALUES (0,593413,0,542152,543218,564727,'F',TO_TIMESTAMP('2019-12-13 13:04:34','YYYY-MM-DD HH24:MI:SS'),100,'Y','N','N','Y','N','N','N',0,'M_HU_PackagingCode_LU_ID',105,0,0,TO_TIMESTAMP('2019-12-13 13:04:34','YYYY-MM-DD HH24:MI:SS'),100)
;
-- 2019-12-13T12:05:10.272Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field SET IsDisplayed='N', IsDisplayedGrid='N',Updated=TO_TIMESTAMP('2019-12-13 13:05:10','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Field_ID=593416
;
-- 2019-12-13T12:05:17.533Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_Field SET IsDisplayed='N', IsDisplayedGrid='N',Updated=TO_TIMESTAMP('2019-12-13 13:05:17','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Field_ID=593414
;
-- 2019-12-13T12:11:31.695Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET SeqNo=75,Updated=TO_TIMESTAMP('2019-12-13 13:11:31','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=564561
;
-- 2019-12-13T12:11:59.231Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET SeqNo=27,Updated=TO_TIMESTAMP('2019-12-13 13:11:59','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=564354
;
-- 2019-12-13T12:12:38.785Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=90,Updated=TO_TIMESTAMP('2019-12-13 13:12:38','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=564561
;
-- 2019-12-13T12:12:38.790Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=110,Updated=TO_TIMESTAMP('2019-12-13 13:12:38','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=564726
;
-- 2019-12-13T12:12:38.794Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=120,Updated=TO_TIMESTAMP('2019-12-13 13:12:38','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=564361
;
-- 2019-12-13T12:12:38.798Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=130,Updated=TO_TIMESTAMP('2019-12-13 13:12:38','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=564727
;
-- 2019-12-13T12:12:38.802Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=140,Updated=TO_TIMESTAMP('2019-12-13 13:12:38','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=564362
;
-- 2019-12-13T12:12:38.806Z
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
UPDATE AD_UI_Element SET IsDisplayedGrid='Y', SeqNoGrid=150,Updated=TO_TIMESTAMP('2019-12-13 13:12:38','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_UI_Element_ID=564353
; | the_stack |
--
-- XDBPM_ANNOTATE_XMLSCHEMA should be created under XDBPM
--
-- Switch the session's default schema so that the package, synonyms and
-- grants below are owned by XDBPM (restored to SYS at the end of the script).
alter session set current_schema = XDBPM
/
--
-- XDBPM_ANNOTATE_XMLSCHEMA: backward-compatible wrapper API for annotating
-- XML Schema documents in place. The package body delegates every routine to
-- DBMS_XMLSCHEMA_ANNOTATE or XDB_EDIT_XMLSCHEMA; this spec keeps the legacy
-- add*/make*/fix* names stable for existing callers.
-- AUTHID CURRENT_USER: routines run with the privileges of the invoker.
create or replace package XDBPM_ANNOTATE_XMLSCHEMA
authid CURRENT_USER
as
-- Constant-style helpers returning the XSD object-kind tokens expected by the
-- P_GLOBAL_OBJECT / P_LOCAL_OBJECT parameters of the procedures below.
function XSD_GROUP return VARCHAR2 deterministic;
function XSD_ELEMENT return VARCHAR2 deterministic;
function XSD_ATTRIBUTE return VARCHAR2 deterministic;
function XSD_COMPLEX_TYPE return VARCHAR2 deterministic;
-- Legacy aliases of XSD_ELEMENT / XSD_COMPLEX_TYPE.
function ELEMENT return VARCHAR2 deterministic;
function COMPLEX_TYPE return VARCHAR2 deterministic;
-- All procedures take the schema document as an IN OUT XMLType and modify it
-- in place by adding/changing xdb:* annotation attributes.
procedure addXDBNamespace(P_XML_SCHEMA in out XMLType);
procedure addStoreVarrayAsTable(P_XML_SCHEMA in out XMLType);
procedure setTimeStampWithTimeZone(P_XML_SCHEMA in out xmlType);
procedure addSQLTypeMapping(P_XML_SCHEMA in out XMLType, P_XSD_TYPE VARCHAR2, P_SQL_TYPE VARCHAR2);
procedure disableDomFidelity(P_XML_SCHEMA in out XMLType);
procedure addMaintainDom(P_XML_SCHEMA in out XMLType, P_MAINTAIN_DOM_SETTING VARCHAR2);
procedure addMaintainDom(P_XML_SCHEMA in out XMLType, P_GLOBAL_NAME VARCHAR2, P_MAINTAIN_DOM_SETTING VARCHAR2);
procedure setMaintainDomTrue(P_XML_SCHEMA in out XMLType, P_GLOBAL_NAME VARCHAR2);
procedure addDefaultTable(P_XML_SCHEMA in out XMLType, P_GLOBAL_ELEMENT VARCHAR2, P_DEFAULT_TABLE_NAME VARCHAR2);
procedure disableDefaultTables(P_XML_SCHEMA in out XMLType);
procedure disableDefaultTable(P_XML_SCHEMA in out XMLType, P_GLOBAL_ELEMENT VARCHAR2);
procedure addSQLName (P_XML_SCHEMA in out XMLType, P_GLOBAL_OBJECT VARCHAR2, P_GLOBAL_NAME VARCHAR2, P_LOCAL_OBJECT VARCHAR2, P_LOCAL_NAME VARCHAR2, P_SQL_NAME VARCHAR2);
procedure addSQLType (P_XML_SCHEMA in out XMLType, P_GLOBAL_NAME VARCHAR2, P_SQL_TYPE VARCHAR2);
procedure addSQLType (P_XML_SCHEMA in out XMLType, P_GLOBAL_OBJECT VARCHAR2, P_GLOBAL_NAME VARCHAR2, P_LOCAL_OBJECT VARCHAR2, P_LOCAL_NAME VARCHAR2, P_SQL_TYPE VARCHAR2);
procedure setAnyStorage (P_XML_SCHEMA in out XMLType, P_COMPLEX_TYPE_NAME VARCHAR2, P_SQL_TYPE_NAME VARCHAR2);
procedure addSQLCollType(P_XML_SCHEMA in out XMLType, P_GLOBAL_OBJECT VARCHAR2, P_GLOBAL_NAME VARCHAR2, P_LOCAL_ELEMENT_NAME VARCHAR2, P_SQL_COLLECTION_TYPE VARCHAR2);
procedure makeOutOfLine (P_XML_SCHEMA in out XMLType, P_GLOBAL_OBJECT VARCHAR2, P_GLOBAL_NAME VARCHAR2, P_LOCAL_ELEMENT_NAME VARCHAR2, P_DEFAULT_TABLE VARCHAR2);
procedure addTableProps(P_XML_SCHEMA in out XMLType, P_GLOBAL_ELEMENT VARCHAR2, P_TABLE_PROPERTIES VARCHAR2);
procedure addTableProps(P_XML_SCHEMA in out XMLType, P_GLOBAL_OBJECT VARCHAR2, P_GLOBAL_NAME VARCHAR2, P_LOCAL_ELEMENT_NAME VARCHAR2, P_TABLE_PROPERTIES VARCHAR2);
procedure makeOutOfLine(P_XML_SCHEMA in out XMLType, P_ELEMENT_REF VARCHAR2, P_DEFAULT_TABLE VARCHAR2);
procedure makeOutOfLine(P_XML_SCHEMA in out XMLType, P_ELEMENT_NAME VARCHAR2, P_ELEMENT_TYPE VARCHAR2, P_DEFAULT_TABLE VARCHAR2);
-- Schema-repair helpers delegated to XDB_EDIT_XMLSCHEMA.
procedure addNewEnumerationValue(P_XML_SCHEMA in out XMLType, P_TARGET_XPATH VARCHAR2, P_NEW_ENUMERATION_VALUE VARCHAR2);
procedure fixImportLocation(P_XML_SCHEMA in out XMLType, P_OLD_LOCATION VARCHAR2, P_NEW_LOCATION VARCHAR2);
procedure fixIncludeLocation(P_XML_SCHEMA in out XMLType, P_OLD_LOCATION VARCHAR2, P_NEW_LOCATION VARCHAR2);
procedure fixRelativeURLs(P_XML_SCHEMA in out XMLType, P_SCHEMA_LOCATION_HINT VARCHAR2);
procedure fixWindowsURLs(P_XML_SCHEMA in out XMLType);
procedure fixEmptyComplexType(P_XML_SCHEMA in out XMLType, P_COMPLEX_TYPE_NAME VARCHAR2);
procedure fixComplexTypeSimpleContent(P_XML_SCHEMA in out XMLType, P_GLOBAL_OBJECT VARCHAR2, P_GLOBAL_NAME VARCHAR2, P_LOCAL_ELEMENT_NAME VARCHAR2, P_SIMPLE_TYPE_NAME VARCHAR2);
procedure expandGroup(P_XML_SCHEMA in out XMLType, P_GROUP_NAME VARCHAR2, P_XSD_DIRECTORY VARCHAR2);
procedure removeAppInfo(P_XML_SCHEMA in out XMLType);
procedure addSQLType (P_XML_SCHEMA in out XMLType, P_GLOBAL_OBJECT VARCHAR2, P_GLOBAL_NAME VARCHAR2, P_LOCAL_ELEMENT_NAME VARCHAR2, P_SQL_TYPE VARCHAR2);
procedure addSQLCollType(P_XML_SCHEMA in out XMLType, P_ELEMENT_NAME VARCHAR2, P_SQL_COLLECTION_TYPE VARCHAR2);
procedure makeOutOfLine_(P_XML_SCHEMA in out XMLType, P_COMPLEX_TYPE_NAME VARCHAR2, P_LOCAL_ELEMENT_NAME VARCHAR2, P_DEFAULT_TABLE VARCHAR2);
procedure makeRefOutOfLine_(P_XML_SCHEMA in out XMLType, P_COMPLEX_TYPE_NAME VARCHAR2, P_LOCAL_ELEMENT_NAME VARCHAR2, P_DEFAULT_TABLE VARCHAR2);
-- Conditional compilation: getSchemaAnnotations only exists in the underlying
-- package on releases newer than 10.2, so it is excluded there.
$IF DBMS_DB_VERSION.VER_LE_10_2 $THEN
$ELSE
function getSchemaAnnotations(xmlSchema XMLType) return XMLType;
$END
end;
/
show errors
--
-- Public-facing aliases and access: both legacy synonym names resolve to the
-- XDBPM-owned package, and any schema may execute it.
create or replace synonym XDB_ANNOTATE_XMLSCHEMA for XDBPM_ANNOTATE_XMLSCHEMA
/
create or replace synonym XDB_ANNOTATE_SCHEMA for XDBPM_ANNOTATE_XMLSCHEMA
/
grant execute on XDBPM_ANNOTATE_XMLSCHEMA to public
/
-- Package body: every routine is a thin delegation wrapper. The legacy names
-- (add*/make*/disableDomFidelity/...) are preserved here while the real work
-- is done by DBMS_XMLSCHEMA_ANNOTATE (annotation) and XDB_EDIT_XMLSCHEMA
-- (structural schema repair).
create or replace package body XDBPM_ANNOTATE_XMLSCHEMA
as
--
-- Object-kind token helpers; ELEMENT/COMPLEX_TYPE are legacy aliases of the
-- XSD_* variants below.
function ELEMENT
return VARCHAR2 deterministic
as
begin
return DBMS_XDB_CONSTANTS.XSD_ELEMENT;
end;
--
function COMPLEX_TYPE
return VARCHAR2 deterministic
as
begin
return DBMS_XDB_CONSTANTS.XSD_COMPLEX_TYPE;
end;
--
function XSD_GROUP
return VARCHAR2 deterministic
as
begin
return DBMS_XDB_CONSTANTS.XSD_GROUP;
end;
--
function XSD_ELEMENT
return VARCHAR2 deterministic
as
begin
return DBMS_XDB_CONSTANTS.XSD_ELEMENT;
end;
--
function XSD_ATTRIBUTE
return VARCHAR2 deterministic
as
begin
return DBMS_XDB_CONSTANTS.XSD_ATTRIBUTE;
end;
--
function XSD_COMPLEX_TYPE
return VARCHAR2 deterministic
as
begin
return DBMS_XDB_CONSTANTS.XSD_COMPLEX_TYPE;
end;
--
procedure addXDBNamespace(P_XML_SCHEMA in out XMLType)
as
begin
DBMS_XMLSCHEMA_ANNOTATE.addXDBNamespace(P_XML_SCHEMA);
end;
--
-- Legacy name: "DOM fidelity" maps to the maintainDOM annotation.
procedure disableDomFidelity(P_XML_SCHEMA in out XMLType)
as
begin DBMS_XMLSCHEMA_ANNOTATE.disableMaintainDOM(P_XML_SCHEMA);
end;
--
procedure setMaintainDomTrue(P_XML_SCHEMA in out XMLType, P_GLOBAL_NAME VARCHAR2)
as
begin DBMS_XMLSCHEMA_ANNOTATE.enableMaintainDOM(P_XML_SCHEMA,P_GLOBAL_NAME);
end;
--
procedure setTimeStampWithTimeZone(P_XML_SCHEMA in out xmlType)
as
begin
DBMS_XMLSCHEMA_ANNOTATE.setTimeStampWithTimeZone(P_XML_SCHEMA);
end;
--
-- Dispatch to enable/disable based on the string setting; anything other
-- than 'TRUE' (case-insensitive) disables maintainDOM.
procedure addMaintainDom(P_XML_SCHEMA in out XMLType, P_MAINTAIN_DOM_SETTING VARCHAR2)
as
begin
if (UPPER(P_MAINTAIN_DOM_SETTING) = 'TRUE') then
DBMS_XMLSCHEMA_ANNOTATE.enableMaintainDOM(P_XML_SCHEMA);
else
DBMS_XMLSCHEMA_ANNOTATE.disableMaintainDOM(P_XML_SCHEMA);
end if;
end ;
--
-- Same dispatch, scoped to one global element/type.
procedure addMaintainDom(P_XML_SCHEMA in out XMLType, P_GLOBAL_NAME VARCHAR2, P_MAINTAIN_DOM_SETTING VARCHAR2)
as
begin
if (UPPER(P_MAINTAIN_DOM_SETTING) = 'TRUE') then
DBMS_XMLSCHEMA_ANNOTATE.enableMaintainDOM(P_XML_SCHEMA,P_GLOBAL_NAME);
else
DBMS_XMLSCHEMA_ANNOTATE.disableMaintainDOM(P_XML_SCHEMA,P_GLOBAL_NAME);
end if;
end;
--
procedure addDefaultTable(P_XML_SCHEMA in out XMLType, P_GLOBAL_ELEMENT VARCHAR2, P_DEFAULT_TABLE_NAME VARCHAR2)
as
begin
DBMS_XMLSCHEMA_ANNOTATE.setDefaultTable(P_XML_SCHEMA,P_GLOBAL_ELEMENT,P_DEFAULT_TABLE_NAME);
end;
--
procedure disableDefaultTables(P_XML_SCHEMA in out XMLType)
as
begin
DBMS_XMLSCHEMA_ANNOTATE.disableDefaultTableCreation(P_XML_SCHEMA);
end;
--
procedure disableDefaultTable(P_XML_SCHEMA in out XMLType, P_GLOBAL_ELEMENT VARCHAR2)
as
begin
DBMS_XMLSCHEMA_ANNOTATE.disableDefaultTableCreation(P_XML_SCHEMA,P_GLOBAL_ELEMENT)
;
end;
--
procedure addSQLName(P_XML_SCHEMA in out XMLType, P_GLOBAL_OBJECT VARCHAR2, P_GLOBAL_NAME VARCHAR2, P_LOCAL_OBJECT VARCHAR2, P_LOCAL_NAME VARCHAR2, P_SQL_NAME VARCHAR2)
as
begin
DBMS_XMLSCHEMA_ANNOTATE.setSQLName(P_XML_SCHEMA,P_GLOBAL_OBJECT,P_GLOBAL_NAME,P_LOCAL_OBJECT,P_LOCAL_NAME,P_SQL_NAME);
end;
--
procedure addSQLType(P_XML_SCHEMA in out XMLType, P_GLOBAL_NAME VARCHAR2, P_SQL_TYPE VARCHAR2)
as
begin
DBMS_XMLSCHEMA_ANNOTATE.setSQLType(P_XML_SCHEMA,P_GLOBAL_NAME,P_SQL_TYPE);
end;
--
procedure addSQLType(P_XML_SCHEMA in out XMLType, P_GLOBAL_OBJECT VARCHAR2, P_GLOBAL_NAME VARCHAR2, P_LOCAL_OBJECT VARCHAR2, P_LOCAL_NAME VARCHAR2, P_SQL_TYPE VARCHAR2)
as
begin
DBMS_XMLSCHEMA_ANNOTATE.setSQLType(P_XML_SCHEMA,P_GLOBAL_OBJECT,P_GLOBAL_NAME,P_LOCAL_OBJECT,P_LOCAL_NAME,P_SQL_TYPE);
end;
--
procedure addSQLCollType(P_XML_SCHEMA in out XMLType, P_GLOBAL_OBJECT VARCHAR2, P_GLOBAL_NAME VARCHAR2, P_LOCAL_ELEMENT_NAME VARCHAR2, P_SQL_COLLECTION_TYPE VARCHAR2)
as
begin
DBMS_XMLSCHEMA_ANNOTATE.setSQLCollType(P_XML_SCHEMA,P_GLOBAL_OBJECT,P_GLOBAL_NAME,P_LOCAL_ELEMENT_NAME,P_SQL_COLLECTION_TYPE);
end;
--
procedure makeOutOfLine (P_XML_SCHEMA in out XMLType, P_GLOBAL_OBJECT VARCHAR2, P_GLOBAL_NAME VARCHAR2, P_LOCAL_ELEMENT_NAME VARCHAR2, P_DEFAULT_TABLE VARCHAR2)
as
begin
DBMS_XMLSCHEMA_ANNOTATE.setOutOfLine(P_XML_SCHEMA,P_GLOBAL_OBJECT,P_GLOBAL_NAME,P_LOCAL_ELEMENT_NAME,P_DEFAULT_TABLE);
end;
--
procedure addTableProps(P_XML_SCHEMA in out XMLType, P_GLOBAL_ELEMENT VARCHAR2, P_TABLE_PROPERTIES VARCHAR2)
as
begin
DBMS_XMLSCHEMA_ANNOTATE.setTableProps(P_XML_SCHEMA,P_GLOBAL_ELEMENT,P_TABLE_PROPERTIES);
end;
--
procedure addTableProps(P_XML_SCHEMA in out XMLType, P_GLOBAL_OBJECT VARCHAR2, P_GLOBAL_NAME VARCHAR2, P_LOCAL_ELEMENT_NAME VARCHAR2, P_TABLE_PROPERTIES VARCHAR2)
as
begin
DBMS_XMLSCHEMA_ANNOTATE.setTableProps(P_XML_SCHEMA,P_GLOBAL_OBJECT,P_GLOBAL_NAME,P_LOCAL_ELEMENT_NAME,P_TABLE_PROPERTIES);
end;
--
procedure addSQLTypeMapping(P_XML_SCHEMA in out XMLType, P_XSD_TYPE VARCHAR2, P_SQL_TYPE VARCHAR2)
as
begin
DBMS_XMLSCHEMA_ANNOTATE.setSQLTypeMapping(P_XML_SCHEMA,P_XSD_TYPE,P_SQL_TYPE);
end;
--
procedure makeOutOfLine(P_XML_SCHEMA in out XMLType, P_COMPLEX_TYPE_NAME VARCHAR2, P_LOCAL_ELEMENT_NAME VARCHAR2, P_DEFAULT_TABLE VARCHAR2)
as
begin
DBMS_XMLSCHEMA_ANNOTATE.setOutOfLine(P_XML_SCHEMA,P_COMPLEX_TYPE_NAME,P_LOCAL_ELEMENT_NAME,P_DEFAULT_TABLE);
end;
--
procedure makeOutOfLine(P_XML_SCHEMA in out XMLType, P_ELEMENT_REF VARCHAR2, P_DEFAULT_TABLE VARCHAR2)
as
begin
DBMS_XMLSCHEMA_ANNOTATE.setOutOfLine(P_XML_SCHEMA,P_ELEMENT_REF,P_DEFAULT_TABLE);
end;
--
procedure makeOutOfLine(P_XML_SCHEMA in out XMLType, P_ELEMENT_NAME VARCHAR2, P_ELEMENT_TYPE VARCHAR2, P_DEFAULT_TABLE VARCHAR2)
as
begin
DBMS_XMLSCHEMA_ANNOTATE.setOutOfLine(P_XML_SCHEMA,P_ELEMENT_NAME,P_ELEMENT_TYPE,P_DEFAULT_TABLE);
end;
--
procedure addSQLCollType(P_XML_SCHEMA in out XMLType, P_ELEMENT_NAME VARCHAR2, P_SQL_COLLECTION_TYPE VARCHAR2)
as
begin
DBMS_XMLSCHEMA_ANNOTATE.setSQLCollType(P_XML_SCHEMA,P_ELEMENT_NAME,P_SQL_COLLECTION_TYPE);
end;
--
procedure setAnyStorage(P_XML_SCHEMA in out XMLType, P_COMPLEX_TYPE_NAME VARCHAR2, P_SQL_TYPE_NAME VARCHAR2)
as
begin
DBMS_XMLSCHEMA_ANNOTATE.setAnyStorage(P_XML_SCHEMA,P_COMPLEX_TYPE_NAME,P_SQL_TYPE_NAME);
end;
--
-- Structural repair wrappers (XDB_EDIT_XMLSCHEMA).
procedure addNewEnumerationValue(P_XML_SCHEMA in out XMLType, P_TARGET_XPATH VARCHAR2, P_NEW_ENUMERATION_VALUE VARCHAR2)
as
begin
XDB_EDIT_XMLSCHEMA.addNewEnumerationValue(P_XML_SCHEMA,P_TARGET_XPATH,P_NEW_ENUMERATION_VALUE);
end;
--
procedure fixImportLocation(P_XML_SCHEMA in out XMLType, P_OLD_LOCATION VARCHAR2, P_NEW_LOCATION VARCHAR2)
as
begin
XDB_EDIT_XMLSCHEMA.fixImportLocation(P_XML_SCHEMA,P_OLD_LOCATION,P_NEW_LOCATION);
end;
--
procedure fixIncludeLocation(P_XML_SCHEMA in out XMLType, P_OLD_LOCATION VARCHAR2, P_NEW_LOCATION VARCHAR2)
as
begin
XDB_EDIT_XMLSCHEMA.fixIncludeLocation(P_XML_SCHEMA,P_OLD_LOCATION,P_NEW_LOCATION);
end;
--
procedure fixRelativeURLs(P_XML_SCHEMA in out XMLType, P_SCHEMA_LOCATION_HINT VARCHAR2)
as
begin
XDB_EDIT_XMLSCHEMA.fixRelativeURLs(P_XML_SCHEMA,P_SCHEMA_LOCATION_HINT);
end;
--
procedure fixWindowsURLs(P_XML_SCHEMA in out XMLType)
as
begin
XDB_EDIT_XMLSCHEMA.fixWindowsURLs(P_XML_SCHEMA);
end;
--
procedure fixEmptyComplexType(P_XML_SCHEMA in out XMLType, P_COMPLEX_TYPE_NAME VARCHAR2)
as
begin
XDB_EDIT_XMLSCHEMA.fixEmptyComplexType(P_XML_SCHEMA,P_COMPLEX_TYPE_NAME);
end;
--
procedure fixComplexTypeSimpleContent(P_XML_SCHEMA in out XMLType, P_GLOBAL_OBJECT VARCHAR2, P_GLOBAL_NAME VARCHAR2, P_LOCAL_ELEMENT_NAME VARCHAR2, P_SIMPLE_TYPE_NAME VARCHAR2)
as
begin
XDB_EDIT_XMLSCHEMA.fixComplexTypeSimpleContent(P_XML_SCHEMA,P_GLOBAL_OBJECT,P_GLOBAL_NAME,P_LOCAL_ELEMENT_NAME,P_SIMPLE_TYPE_NAME);
end;
--
procedure removeAppInfo(P_XML_SCHEMA in out XMLType)
as
begin
XDB_EDIT_XMLSCHEMA.removeAppInfo(P_XML_SCHEMA);
end;
--
-- NOTE(review): P_GROUP_NAME is accepted but never used here; the call chain
-- loads group definitions from P_XSD_DIRECTORY and expands all repeating
-- groups. Confirm whether a per-group expansion was intended.
procedure expandGroup(P_XML_SCHEMA in out XMLType, P_GROUP_NAME VARCHAR2, P_XSD_DIRECTORY VARCHAR2)
as
begin
XDB_EDIT_XMLSCHEMA.loadGroupDefinitions(P_XSD_DIRECTORY);
XDB_EDIT_XMLSCHEMA.expandRepeatingGroups(P_XML_SCHEMA);
end;
--
procedure makeOutOfLine_(P_XML_SCHEMA in out XMLType, P_COMPLEX_TYPE_NAME VARCHAR2, P_LOCAL_ELEMENT_NAME VARCHAR2, P_DEFAULT_TABLE VARCHAR2)
as
begin
makeOutOfLine(P_XML_SCHEMA,XDBPM_ANNOTATE_XMLSCHEMA.XSD_COMPLEX_TYPE,P_COMPLEX_TYPE_NAME,P_LOCAL_ELEMENT_NAME,P_DEFAULT_TABLE);
end;
--
-- NOTE(review): this body is identical to makeOutOfLine_ above; if a
-- "by element ref" variant was intended, it is not implemented — confirm.
procedure makeRefOutOfLine_(P_XML_SCHEMA in out XMLType, P_COMPLEX_TYPE_NAME VARCHAR2, P_LOCAL_ELEMENT_NAME VARCHAR2, P_DEFAULT_TABLE VARCHAR2)
as
begin
makeOutOfLine(P_XML_SCHEMA,XDBPM_ANNOTATE_XMLSCHEMA.XSD_COMPLEX_TYPE,P_COMPLEX_TYPE_NAME,P_LOCAL_ELEMENT_NAME,P_DEFAULT_TABLE);
end;
--
-- Convenience overload: defaults the local object kind to XSD_ELEMENT.
procedure addSQLType(P_XML_SCHEMA in out XMLType, P_GLOBAL_OBJECT VARCHAR2, P_GLOBAL_NAME VARCHAR2, P_LOCAL_ELEMENT_NAME VARCHAR2, P_SQL_TYPE VARCHAR2)
is
begin
addSQLType(P_XML_SCHEMA,P_GLOBAL_OBJECT,P_GLOBAL_NAME,XDBPM_ANNOTATE_XMLSCHEMA.XSD_ELEMENT,P_LOCAL_ELEMENT_NAME,P_SQL_TYPE);
end;
--
-- Adds xdb:storeVarrayAsTable="true" on the root xsd:schema element, but only
-- if the attribute is not already present (existing values are preserved).
procedure addStoreVarrayAsTable(P_XML_SCHEMA in out XMLType)
as
begin
if P_XML_SCHEMA.existsNode('/xsd:schema[@xdb:storeVarrayAsTable]',XDB_NAMESPACES.XDBSCHEMA_PREFIXES) = 0 then
select insertChildXML
(
P_XML_SCHEMA,
'/xsd:schema',
'@xdb:storeVarrayAsTable',
'true',
XDB_NAMESPACES.XDBSCHEMA_PREFIXES
)
into P_XML_SCHEMA
from dual;
end if;
end;
--
-- Only available when the underlying DBMS_XMLSCHEMA_ANNOTATE provides it
-- (releases newer than 10.2).
$IF DBMS_DB_VERSION.VER_LE_10_2 $THEN
$ELSE
function getSchemaAnnotations(xmlSchema XMLType)
return XMLType
as
begin
return DBMS_XMLSCHEMA_ANNOTATE.getSchemaAnnotations(XMLSCHEMA);
end;
--
$END
end XDBPM_ANNOTATE_XMLSCHEMA;
/
show errors
--
-- Restore the default schema changed at the top of this script.
alter session set current_schema = SYS
/
-- Alarm-service schema for H2 running in MySQL compatibility mode (the DDL
-- below mixes H2 and MySQL idioms, e.g. AUTO_INCREMENT plus CREATE INDEX).
-- Each table is dropped and recreated, so this script resets all data.
SET MODE MySQL;
/*------------------------------------------- create alarm---------------------------------------------------------------------*/
-- Alarm definitions: a named, cron-scheduled check owned by a team.
DROP TABLE IF EXISTS alarm;
CREATE TABLE IF NOT EXISTS alarm
(
    id BIGINT AUTO_INCREMENT PRIMARY KEY,
    alarm_name VARCHAR(100) NOT NULL,
    alarm_type VARCHAR(200) NOT NULL,
    description VARCHAR(1000) NOT NULL,
    owner_key VARCHAR(200),
    status VARCHAR(50) NOT NULL,
    execute_result VARCHAR(50) NOT NULL DEFAULT 'NONE',
    execute_at DATETIME,
    job_id BIGINT NOT NULL DEFAULT 0,
    cron VARCHAR(500) NOT NULL,
    creator VARCHAR(200) NOT NULL,
    create_at DATETIME NOT NULL,
    modifier VARCHAR(200) NOT NULL,
    modify_at DATETIME NOT NULL,
    team_name VARCHAR(200) NOT NULL
);
CREATE INDEX idx_alarm_ownerkey ON alarm (owner_key);
/*------------------------------------------- create alarm_log -------------------------------------------*/
-- One row per alarm execution: timing, outcome and diagnostic message.
DROP TABLE IF EXISTS alarm_log;
CREATE TABLE IF NOT EXISTS alarm_log
(
    id BIGINT AUTO_INCREMENT PRIMARY KEY,
    alarm_id BIGINT NOT NULL,
    exe_start DATETIME NOT NULL,
    exe_end DATETIME NOT NULL,
    cost INT NOT NULL,
    execute_result VARCHAR(50) NOT NULL,
    verify_result VARCHAR(50) NOT NULL DEFAULT 'NONE',
    message TEXT,
    create_at DATETIME NOT NULL
);
-- Composite index leads with create_at; lookups by alarm_id alone will not
-- use it — NOTE(review): confirm query patterns before relying on this.
CREATE INDEX idx_alarm_log_createat_alarmid ON alarm_log (create_at, alarm_id);
/*------------------------------------------- create alert -------------------------------------------*/
-- Notification channel configuration per alarm (ways, silence window, hooks).
DROP TABLE IF EXISTS alert;
CREATE TABLE IF NOT EXISTS alert
(
    id BIGINT AUTO_INCREMENT PRIMARY KEY,
    alarm_id BIGINT NOT NULL,
    ways VARCHAR(500) NOT NULL,
    silence BIGINT NOT NULL,
    creator VARCHAR(200) NOT NULL,
    create_at DATETIME NOT NULL,
    allow_sms_from INTEGER NULL,
    allow_sms_to INTEGER NULL,
    ding_robot_hook VARCHAR(500) NULL,
    http_post_url VARCHAR(500),
    wechat_robot_hook VARCHAR(500) NULL
);
CREATE INDEX idx_alert_alarmid ON alert (alarm_id);
/*------------------------------------------- create alert_log -------------------------------------------*/
-- Record of every notification sent (or suppressed by a silence window).
DROP TABLE IF EXISTS alert_log;
CREATE TABLE IF NOT EXISTS alert_log
(
    id BIGINT AUTO_INCREMENT PRIMARY KEY,
    alarm_id BIGINT NOT NULL,
    execute_id BIGINT NOT NULL,
    way VARCHAR(100) NOT NULL,
    recipient VARCHAR(100) NOT NULL,
    content TEXT NOT NULL,
    in_silence VARCHAR(50) NOT NULL,
    send_status VARCHAR(50) NOT NULL,
    alert_type VARCHAR(50) NOT NULL,
    create_at DATETIME NOT NULL
);
CREATE INDEX idx_alert_log_createat_recipient ON alert_log (create_at, recipient);
CREATE INDEX idx_alert_log_alarmid ON alert_log (alarm_id);
/*------------------------------------------- create data_mapping -------------------------------------------*/
-- Field-level metadata for a named data set (display/typing information).
DROP TABLE IF EXISTS data_mapping;
CREATE TABLE IF NOT EXISTS data_mapping
(
    id BIGINT AUTO_INCREMENT PRIMARY KEY,
    data_name VARCHAR(200) NOT NULL,
    field_name VARCHAR(200) NOT NULL,
    field_type VARCHAR(200) NOT NULL,
    field_description VARCHAR(500) NOT NULL,
    creator VARCHAR(200) NOT NULL,
    create_at DATETIME NOT NULL,
    modifier VARCHAR(200) NOT NULL,
    modify_at DATETIME NOT NULL
);
CREATE INDEX idx_data_mapping_dataname ON data_mapping (data_name);
/*------------------------------------------- create data_name -------------------------------------------*/
-- Logical data sets; each points at a data_source row via data_source_id.
DROP TABLE IF EXISTS data_name;
CREATE TABLE IF NOT EXISTS data_name
(
    id BIGINT AUTO_INCREMENT PRIMARY KEY,
    data_name VARCHAR(200) NOT NULL,
    display_name VARCHAR(200) NOT NULL,
    data_source_id BIGINT NOT NULL,
    datasource_type VARCHAR(500) NOT NULL,
    timestamp_field VARCHAR(200),
    properties VARCHAR(2000) NOT NULL,
    creator VARCHAR(200) NOT NULL,
    create_at DATETIME NOT NULL,
    modifier VARCHAR(200) NOT NULL,
    modify_at DATETIME NOT NULL
);
CREATE UNIQUE INDEX uniq_data_name_dataname ON data_name (data_name);
/*------------------------------------------- create data_source -------------------------------------------*/
-- Physical data-source connection registry.
DROP TABLE IF EXISTS data_source;
CREATE TABLE IF NOT EXISTS data_source
(
    id BIGINT AUTO_INCREMENT PRIMARY KEY,
    datasource_name VARCHAR(500) NOT NULL,
    datasource_type VARCHAR(500) NOT NULL,
    service_address VARCHAR(500) NOT NULL,
    properties VARCHAR(2000),
    creator VARCHAR(200) NOT NULL,
    create_at DATETIME NOT NULL,
    modifier VARCHAR(200) NOT NULL,
    modify_at DATETIME NOT NULL
);
/*------------------------------------------- create metric -------------------------------------------*/
-- Metric definition evaluated by a rule: aggregation over a data set/query.
DROP TABLE IF EXISTS metric;
CREATE TABLE IF NOT EXISTS metric
(
    id BIGINT AUTO_INCREMENT PRIMARY KEY,
    aggregation_type VARCHAR(100),
    aggregation_field VARCHAR(100),
    metric_type VARCHAR(100) NOT NULL,
    alarm_id BIGINT NOT NULL,
    rule_id BIGINT NOT NULL,
    data_source_id BIGINT NOT NULL DEFAULT 0,
    data_name_id BIGINT NOT NULL DEFAULT 0,
    data_name VARCHAR(200) NOT NULL,
    query_string VARCHAR(1000),
    post_data VARCHAR(2000),
    properties VARCHAR(2000),
    creator VARCHAR(200) NOT NULL,
    create_at DATETIME NOT NULL
);
CREATE INDEX idx_metric_ruleid ON metric (rule_id);
CREATE INDEX idx_metric_alarmid ON metric (alarm_id);
/*------------------------------------------- create recipient -------------------------------------------*/
-- Accounts that receive notifications for an alarm/alert pair.
DROP TABLE IF EXISTS recipient;
CREATE TABLE IF NOT EXISTS recipient
(
    id BIGINT AUTO_INCREMENT PRIMARY KEY,
    alarm_id BIGINT NOT NULL,
    alert_id BIGINT NOT NULL,
    account VARCHAR(50) NOT NULL,
    create_at DATETIME NOT NULL
);
CREATE INDEX idx_recipient_alertid ON recipient (alert_id);
CREATE INDEX idx_recipient_alarmid ON recipient (alarm_id);
/*------------------------------------------- create rule -------------------------------------------*/
-- Evaluation rule attached to an alarm, with its alert message template.
-- NOTE(review): "rule" is close to reserved words in some engines; quoted
-- access may be needed if the target database changes.
DROP TABLE IF EXISTS rule;
CREATE TABLE IF NOT EXISTS rule
(
    id BIGINT AUTO_INCREMENT PRIMARY KEY,
    rule_type VARCHAR(100) NOT NULL,
    alarm_id BIGINT NOT NULL,
    alert_template VARCHAR(5000) NOT NULL,
    creator VARCHAR(200) NOT NULL,
    create_at DATETIME NOT NULL
);
CREATE INDEX idx_rule_alarmid ON rule (alarm_id);
/*------------------------------------------- create rule_property -------------------------------------------*/
-- Free-form key/value parameters of a rule.
DROP TABLE IF EXISTS rule_property;
CREATE TABLE IF NOT EXISTS rule_property
(
    id BIGINT AUTO_INCREMENT PRIMARY KEY,
    alarm_id BIGINT NOT NULL,
    rule_id BIGINT NOT NULL,
    prop_key VARCHAR(100) NOT NULL,
    prop_value VARCHAR(1000) NOT NULL,
    creator VARCHAR(200) NOT NULL,
    create_at DATETIME NOT NULL
);
CREATE INDEX idx_rule_property_alarmid ON rule_property (alarm_id);
CREATE INDEX idx_rule_property_ruleid ON rule_property (rule_id);
/*------------------------------------------- create department_info---------------------------------------------------------------------*/
-- Organisation hierarchy: department -> team -> user.
DROP TABLE IF EXISTS department_info;
CREATE TABLE IF NOT EXISTS department_info
(
    id BIGINT AUTO_INCREMENT PRIMARY KEY,
    department_name VARCHAR(200) NOT NULL,
    full_name VARCHAR(200) NOT NULL,
    creator VARCHAR(200) NOT NULL,
    create_at DATETIME NOT NULL,
    modify_at DATETIME NOT NULL,
    modifier VARCHAR(200) NOT NULL
);
CREATE UNIQUE INDEX uniq_departmentname ON department_info (department_name);
/*------------------------------------------- create team_info---------------------------------------------------------------------*/
DROP TABLE IF EXISTS team_info;
CREATE TABLE IF NOT EXISTS team_info
(
    id BIGINT AUTO_INCREMENT PRIMARY KEY,
    team_name VARCHAR(200) NOT NULL,
    full_name VARCHAR(200) NOT NULL,
    department_id BIGINT NOT NULL,
    creator VARCHAR(200) NOT NULL,
    create_at DATETIME NOT NULL,
    modify_at DATETIME NOT NULL,
    modifier VARCHAR(200) NOT NULL
);
CREATE UNIQUE INDEX uniq_team_info_name ON team_info (team_name);
/*------------------------------------------- create user_info---------------------------------------------------------------------*/
DROP TABLE IF EXISTS user_info;
CREATE TABLE IF NOT EXISTS user_info
(
    id BIGINT AUTO_INCREMENT PRIMARY KEY,
    account VARCHAR(200) NOT NULL,
    full_name VARCHAR(200) NOT NULL,
    team_id BIGINT NOT NULL,
    mobile VARCHAR(20),
    email VARCHAR(50),
    wxid VARCHAR(50),
    creator VARCHAR(200) NOT NULL,
    create_at DATETIME NOT NULL,
    modify_at DATETIME NOT NULL,
    modifier VARCHAR(200) NOT NULL
);
CREATE INDEX idx_user_info_createat ON user_info (create_at);
CREATE UNIQUE INDEX uniq_user_info_account ON user_info (account);
/*------------------------------------------- create user_role---------------------------------------------------------------------*/
-- User-to-role assignments. This is the only table declaring an engine,
-- charset and column comments (MySQL-style DDL, accepted in H2 MySQL mode).
DROP TABLE IF EXISTS user_role;
CREATE TABLE IF NOT EXISTS user_role
(
    id BIGINT AUTO_INCREMENT PRIMARY KEY COMMENT '自增主键',
    account VARCHAR(200) NOT NULL COMMENT '账号',
    role VARCHAR(200) NOT NULL COMMENT '角色',
    creator VARCHAR(200) NOT NULL COMMENT '创建人',
    create_at DATETIME NOT NULL COMMENT '创建时间'
)
    ENGINE = InnoDB
    DEFAULT CHARSET = utf8mb4
    COMMENT = '用户角色关系表';
ALTER TABLE user_role ADD INDEX idx_user_role_account (account);
/*------------------------------------------- init data---------------------------------------------------------------------*/
-- Seed data: one default department/team and an admin user with admin role.
INSERT INTO department_info(department_name, full_name, creator, create_at, modify_at, modifier) VALUES ('default', '默认部门', 'admin', now(), now(), 'admin');
INSERT INTO team_info(team_name, full_name, department_id, creator, create_at, modify_at, modifier) VALUES ('default', '炒鸡赛亚人', 1, 'admin', now(), now(), 'admin');
INSERT INTO user_info(account, full_name, team_id, mobile, email, wxid, creator, create_at, modify_at, modifier) VALUES ('admin', '管理员', 1, null, 'xxx@163.com', 'wxid1', 'admin', now(), now(), 'admin');
INSERT INTO user_role(account, role, creator, create_at) VALUES('admin', 'admin', 'admin', now());
import debug from 'debug';
import htmlToText from 'html-to-text';
import { List } from 'immutable';
import { ThemeType } from '../configs/theme';
import {
BlockType,
DiagramLayoutType,
FocusMode,
KeyType,
TopicRelationship
} from '../types';
import { createKey } from '../utils';
import { Block } from './block';
import { Config, ConfigRecordType } from './config';
import { DescBlockData } from './desc-block-data';
import { SheetModel } from './sheet-model';
import { Topic } from './topic';
import {
getAllSubTopicKeys,
getKeyPath,
getPrevTopicKey,
getRelationship
} from './utils';
// Debug-channel logger for this module; enable with DEBUG=modifier.
const log = debug('modifier');
// Union of every argument shape accepted by the modifier functions below.
type ModifierArg =
  | BaseSheetModelModifierArg
  | SetTopicArg
  | setTopicBlockDataArg
  | SetFocusModeArg
  | SetTopicStyleArg
  | SetZoomFactorArg
  | SetThemeArg
  | SetLayoutDirArg
  | SetConfigArg;
// Common arguments: the sheet model plus an optional target topic (or topics).
export type BaseSheetModelModifierArg = {
  model: SheetModel;
  topicKey?: KeyType;
  topicKeys?: Array<KeyType>;
};
type SetTopicArg = BaseSheetModelModifierArg & {
  topic: Topic;
};
// NOTE(review): lower-case type name is inconsistent with the PascalCase
// siblings; renaming would touch other declarations, so it is left as-is.
type setTopicBlockDataArg = BaseSheetModelModifierArg & {
  blockType: string;
  data: any;
  focusMode?: string;
};
type DeleteTopicBlockArg = BaseSheetModelModifierArg & {
  blockType: string;
};
type SetFocusModeArg = BaseSheetModelModifierArg & {
  focusMode: string;
};
type SetTopicStyleArg = BaseSheetModelModifierArg & {
  style: string;
};
type SetZoomFactorArg = BaseSheetModelModifierArg & {
  zoomFactor: number;
};
type SetThemeArg = BaseSheetModelModifierArg & {
  theme: ThemeType;
};
type SetLayoutDirArg = BaseSheetModelModifierArg & {
  layoutDir: DiagramLayoutType;
};
type SetConfigArg = BaseSheetModelModifierArg & {
  config: Partial<ConfigRecordType>;
};
// Every modifier returns the (possibly unchanged) immutable model.
export type SheetModelModifierResult = SheetModel;
function toggleCollapse({
  model,
  topicKey
}: BaseSheetModelModifierArg): SheetModelModifierResult {
  // Flip the collapse flag of a topic that has children, then reset focus.
  const topic = model.getTopic(topicKey);
  if (topic && topic.subKeys.size !== 0) {
    // Fix: the original also built a merged copy of `topic` here, but that
    // local copy was never used anywhere — the updateIn below is what
    // actually toggles the flag. The dead computation is removed.
    model = model.updateIn(
      ['topics', topic.key, 'collapse'],
      collapse => !collapse
    );
  }
  model = focusTopic({ model, topicKey, focusMode: FocusMode.NORMAL });
  return model;
}
function collapseAll({
  model
}: BaseSheetModelModifierArg): SheetModelModifierResult {
  // Collapse every descendant of the current editor root, then focus the root.
  const descendantKeys = getAllSubTopicKeys(model, model.editorRootTopicKey);
  log(model);
  model = model.withMutations(mutable => {
    descendantKeys.forEach(key => {
      mutable.setIn(['topics', key, 'collapse'], true);
    });
  });
  return focusTopic({
    model,
    topicKey: model.editorRootTopicKey,
    focusMode: FocusMode.NORMAL
  });
}
function expandAll({
  model
}: BaseSheetModelModifierArg): SheetModelModifierResult {
  // Un-collapse every descendant of the current editor root.
  const descendantKeys = getAllSubTopicKeys(model, model.editorRootTopicKey);
  log(model);
  model = model.withMutations(mutable => {
    descendantKeys.forEach(key => {
      mutable.setIn(['topics', key, 'collapse'], false);
    });
  });
  log(model);
  return model;
}
function expandTo({
  model,
  topicKey
}: BaseSheetModelModifierArg): SheetModelModifierResult {
  // Un-collapse every ancestor on the path down to `topicKey` so the topic
  // becomes reachable in the rendered tree.
  const ancestorKeys = getKeyPath(model, topicKey).filter(k => k !== topicKey);
  model = model.withMutations(mutable => {
    ancestorKeys.forEach(key => {
      mutable.setIn(['topics', key, 'collapse'], false);
    });
  });
  // Make the node visible in the viewport: if it is not a descendant of the
  // current editor root, fall back to the real root as editor root.
  const relation = getRelationship(model, topicKey, model.editorRootTopicKey);
  if (relation !== TopicRelationship.DESCENDANT) {
    model = model.set('editorRootTopicKey', model.rootTopicKey);
  }
  return model;
}
function focusTopic({
  model,
  topicKey,
  focusMode = FocusMode.NORMAL
}: SetFocusModeArg): SheetModelModifierResult {
  // Move focus to `topicKey`, (re)apply the focus mode, clear any selection.
  log('focus topic', focusMode);
  if (!model.topics.has(topicKey)) {
    throw new Error(`focus key ${topicKey} is not in model`);
  }
  if (model.focusKey !== topicKey) {
    model = model.set('focusKey', topicKey);
  }
  // focusMode is written unconditionally, even when the value is unchanged.
  model = model.set('focusMode', focusMode);
  if (model.selectedKeys != null) {
    model = model.set('selectedKeys', null);
  }
  return model;
}
function setFocusMode({
  model,
  focusMode
}: SetFocusModeArg): SheetModelModifierResult {
  log('setFocusMode');
  // SHOW_POPUP must always be re-applied, because the dialog type may have
  // changed even when the mode value itself did not.
  const needsWrite =
    focusMode !== model.focusMode || focusMode === FocusMode.SHOW_POPUP;
  return needsWrite ? model.set('focusMode', focusMode) : model;
}
function addChild({
model,
topicKey,
addAtFront = false
}: BaseSheetModelModifierArg & {
addAtFront: boolean;
}): SheetModelModifierResult {
log('addChild:', topicKey);
let topic = model.getTopic(topicKey);
if (topic) {
const child = Topic.create({ key: createKey(), parentKey: topic.key });
topic = topic
.set('collapse', false)
.update('subKeys', subKeys =>
addAtFront ? subKeys.unshift(child.key) : subKeys.push(child.key)
);
model = model.update('topics', topics =>
topics.set(topicKey, topic).set(child.key, child)
);
return focusTopic({
model,
topicKey: child.key,
focusMode: FocusMode.EDITING_CONTENT
});
}
return model;
}
function addSibling({
  model,
  topicKey,
  content
}: BaseSheetModelModifierArg & {
  content?: string;
}): SheetModelModifierResult {
  // Insert a new topic right after `topicKey` under the same parent, then
  // start editing the new topic. The root topic cannot have siblings.
  if (topicKey === model.rootTopicKey) return model;
  const topic = model.getTopic(topicKey);
  if (!topic) {
    return model;
  }
  const parent = model.getTopic(topic.parentKey);
  const anchorIndex = parent.subKeys.indexOf(topicKey);
  const sibling = Topic.create({
    key: createKey(),
    parentKey: parent.key,
    content
  });
  model = model
    .update('topics', topics => topics.set(sibling.key, sibling))
    .updateIn(['topics', parent.key, 'subKeys'], subKeys =>
      subKeys.insert(anchorIndex + 1, sibling.key)
    );
  return focusTopic({
    model,
    topicKey: sibling.key,
    focusMode: FocusMode.EDITING_CONTENT
  });
}
function topicContentToPlainText({
  model,
  topicKey
}: BaseSheetModelModifierArg): SheetModelModifierResult {
  // Replace the topic's HTML content block with its plain-text rendering.
  const { block } = model.getTopic(topicKey).getBlock(BlockType.CONTENT);
  const plain = htmlToText.fromString(block.data, { preserveNewlines: true });
  return setTopicBlockContentData({ model, topicKey, data: plain });
}
function deleteTopic({
  model,
  topicKey
}: BaseSheetModelModifierArg): SheetModelModifierResult {
  // Remove a topic, its entire subtree, and its reference in the parent's
  // subKeys list. The current editor root can never be deleted.
  if (topicKey === model.editorRootTopicKey) return model;
  const item = model.getTopic(topicKey);
  if (item) {
    model = model.withMutations(m => {
      m.update('topics', topics => {
        // Delete the topic itself plus every descendant key.
        topics = topics.delete(topicKey);
        const deleteKeys = getAllSubTopicKeys(model, topicKey);
        topics = topics.withMutations(t => {
          deleteKeys.forEach(dKey => {
            t.delete(dKey);
          });
        });
        return topics;
      });
      // Detach the deleted key from its parent's child list.
      m.updateIn(['topics', item.parentKey, 'subKeys'], subKeys =>
        subKeys.delete(subKeys.indexOf(topicKey))
      );
      // If the deleted topic was focused, move focus to the previous topic.
      // NOTE(review): focusMode is set to EDITING_CONTENT here rather than
      // NORMAL — confirm this is the intended post-delete mode.
      if (m.focusKey === topicKey)
        m.set('focusKey', getPrevTopicKey(model, topicKey)).set(
          'focusMode',
          FocusMode.EDITING_CONTENT
        );
    });
  }
  return model;
}
function deleteTopics({ model, topicKeys }): SheetModelModifierResult {
  // Delete every listed topic; defaults to the focused/selected key set.
  const keys = topicKeys == null ? model.focusOrSelectedKeys : topicKeys;
  keys.forEach(key => {
    model = deleteTopic({ model, topicKey: key });
  });
  return model;
}
/**
 * Create or update one block (selected by type) on a topic.
 * @param model the sheet model
 * @param topicKey key of the topic to modify
 * @param blockType which block (content, desc, ...) to write
 * @param focusMode when given, focus the topic with this mode afterwards
 * @param data new payload for the block
 */
function setTopicBlockData({
  model,
  topicKey,
  blockType,
  focusMode,
  data
}: setTopicBlockDataArg): SheetModelModifierResult {
  const topic = model.getTopic(topicKey);
  if (!topic) {
    return model;
  }
  const { index, block } = topic.getBlock(blockType);
  if (index === -1) {
    // No block of this type yet: append a freshly created one.
    model = model.updateIn(['topics', topicKey, 'blocks'], blocks =>
      blocks.push(Block.create({ type: blockType, data: data }))
    );
  } else if (block.data !== data) {
    // Existing block: overwrite its data only when it actually changed.
    model = model.updateIn(
      ['topics', topicKey, 'blocks', index, 'data'],
      () => data
    );
  }
  return focusMode ? focusTopic({ model, topicKey, focusMode }) : model;
}
function setTopicBlockContentData({
  model,
  topicKey,
  focusMode = null,
  data
}): SheetModelModifierResult {
  // Convenience wrapper: write the CONTENT block of a topic.
  return setTopicBlockData({
    blockType: BlockType.CONTENT,
    model,
    topicKey,
    focusMode,
    data
  });
}
function deleteTopicBlock({ model, topicKey, blockType }: DeleteTopicBlockArg) {
  // Remove the block of the given type from a topic (if present) and drop
  // back to NORMAL focus mode.
  const topic = model.getTopic(topicKey);
  if (!topic) {
    return model;
  }
  const { index } = topic.getBlock(blockType);
  if (index !== -1) {
    model = model.updateIn(['topics', topicKey, 'blocks'], blocks =>
      blocks.delete(index)
    );
  }
  return setFocusMode({ model, focusMode: FocusMode.NORMAL });
}
function deleteTopicBlockDesc({ model, topicKey }: BaseSheetModelModifierArg) {
return deleteTopicBlock({ model, topicKey, blockType: BlockType.DESC });
}
function setStyle({
  model,
  topicKey,
  style
}: SetTopicStyleArg): SheetModelModifierResult {
  // Overwrite a topic's custom style string, skipping no-op writes.
  const topic = model.getTopic(topicKey);
  if (topic && style !== topic.style) {
    model = model.setIn(['topics', topicKey, 'style'], style);
  }
  return model;
}
function clearAllCustomStyle({
  model
}: BaseSheetModelModifierArg): SheetModelModifierResult {
  // Reset the custom style of every topic in the sheet back to null.
  return model.withMutations(mutable => {
    mutable.topics.keySeq().forEach(key => {
      mutable.setIn(['topics', key, 'style'], null);
    });
  });
}
function setTheme({ model, theme }: SetThemeArg): SheetModelModifierResult {
  // Store the new theme in the sheet's config record.
  return model.setIn(['config', 'theme'], theme);
}
function setLayoutDir({
  model,
  layoutDir
}: SetLayoutDirArg): SheetModelModifierResult {
  // Change the diagram layout direction; no-op when it is unchanged.
  return model.config.layoutDir === layoutDir
    ? model
    : model.setIn(['config', 'layoutDir'], layoutDir);
}
function setConfig({ model, config }: SetConfigArg) {
  // Shallow-merge a partial config record into the existing config.
  const merged = model.config.merge(config);
  return model.set('config', merged);
}
function setEditorRootTopicKey({
  model,
  topicKey
}: BaseSheetModelModifierArg): SheetModelModifierResult {
  // Use `topicKey` as the editor's root and make sure it is expanded.
  if (model.editorRootTopicKey !== topicKey) {
    model = model.set('editorRootTopicKey', topicKey);
  }
  if (model.getTopic(topicKey).collapse) {
    model = model.setIn(['topics', topicKey, 'collapse'], false);
  }
  return model;
}
function setZoomFactor({
  model,
  zoomFactor
}: SetZoomFactorArg): SheetModelModifierResult {
  // Update the zoom factor; no-op when it is unchanged.
  return model.zoomFactor === zoomFactor
    ? model
    : model.set('zoomFactor', zoomFactor);
}
function startEditingContent({ model, topicKey }: BaseSheetModelModifierArg) {
return focusTopic({
model,
topicKey,
focusMode: FocusMode.EDITING_CONTENT
});
}
function startEditingDesc({ model, topicKey }: BaseSheetModelModifierArg) {
  // Begin editing a topic's description, creating an empty HTML desc block
  // first when none exists yet.
  const topic = model.getTopic(topicKey);
  const desc = topic.getBlock(BlockType.DESC);
  const descMissing = desc.block == null || desc.block.data == null;
  if (descMissing) {
    model = SheetModelModifier.setTopicBlockData({
      model,
      topicKey,
      blockType: BlockType.DESC,
      data: new DescBlockData({ kind: 'html', data: '', collapse: false })
    });
  }
  return SheetModelModifier.focusTopic({
    model,
    topicKey,
    focusMode: FocusMode.EDITING_DESC
  });
}
function dragAndDrop({ model, srcKey, dstKey, dropDir }) {
  // Move topic `srcKey` relative to `dstKey`.
  // dropDir: 'in'   -> srcKey becomes the last child of dstKey
  //          'prev' -> srcKey becomes the sibling before dstKey
  //          'next' -> srcKey becomes the sibling after dstKey
  const srcTopic = model.getTopic(srcKey);
  const dstTopic = model.getTopic(dstKey);
  const srcParentKey = srcTopic.parentKey;
  const srcParentTopic = model.getTopic(srcParentKey);
  let srcParentSubKeys = srcParentTopic.subKeys;
  const srcIndex = srcParentSubKeys.indexOf(srcKey);
  // Detach src from its old parent's child list (committed to model below).
  srcParentSubKeys = srcParentSubKeys.delete(srcIndex);
  if (dropDir === 'in') {
    let dstSubKeys = dstTopic.subKeys;
    dstSubKeys = dstSubKeys.push(srcKey);
    model = model.withMutations(m => {
      m.setIn(['topics', srcParentKey, 'subKeys'], srcParentSubKeys)
        .setIn(['topics', srcKey, 'parentKey'], dstKey)
        .setIn(['topics', dstKey, 'subKeys'], dstSubKeys)
        .setIn(['topics', dstKey, 'collapse'], false);
    });
  } else {
    const dstParentKey = dstTopic.parentKey;
    const dstParentItem = model.getTopic(dstParentKey);
    let dstParentSubKeys = dstParentItem.subKeys;
    const dstIndex = dstParentSubKeys.indexOf(dstKey);
    // Special case: src and dst share the same parent, so removal and
    // insertion touch the same list; rebuild it in one pass to avoid
    // index-shift bugs.
    if (srcParentKey === dstParentKey) {
      let newDstParentSubKeys = List();
      dstParentSubKeys.forEach(key => {
        if (key !== srcKey) {
          if (key === dstKey) {
            if (dropDir === 'prev') {
              newDstParentSubKeys = newDstParentSubKeys.push(srcKey).push(key);
            } else {
              newDstParentSubKeys = newDstParentSubKeys.push(key).push(srcKey);
            }
          } else {
            newDstParentSubKeys = newDstParentSubKeys.push(key);
          }
        }
      });
      model = model.withMutations(m => {
        m.setIn(['topics', dstParentKey, 'subKeys'], newDstParentSubKeys);
      });
    } else {
      // Different parents: insert before or after dst in dst's parent list.
      if (dropDir === 'prev') {
        dstParentSubKeys = dstParentSubKeys.insert(dstIndex, srcKey);
      } else if (dropDir === 'next') {
        dstParentSubKeys = dstParentSubKeys.insert(dstIndex + 1, srcKey);
      }
      model = model.withMutations(m => {
        m.setIn(['topics', srcParentKey, 'subKeys'], srcParentSubKeys)
          .setIn(['topics', srcKey, 'parentKey'], dstParentKey)
          .setIn(['topics', dstParentKey, 'subKeys'], dstParentSubKeys)
          .setIn(['topics', dstParentKey, 'collapse'], false);
      });
    }
  }
  return model;
}
function swapUp({
  model,
  topicKeys
}: BaseSheetModelModifierArg): SheetModelModifierResult {
  // Move a group of sibling topics one position up within their parent's
  // subKeys list. No-op when the group already touches the top.
  if (topicKeys == null) topicKeys = model.focusOrSelectedKeys;
  const firstKey = topicKeys[0];
  const parent = model.getParentTopic(firstKey);
  const idxArray = [];
  for (const itemKey of topicKeys) {
    const idx = parent.subKeys.indexOf(itemKey);
    // Bail out if the given keys are not all siblings of the first key.
    if (idx === -1) return model;
    idxArray.push(idx);
  }
  // Sort the positions so the moved group keeps its visual order.
  idxArray.sort((a, b) => a - b);
  const firstIdx = idxArray[0];
  if (firstIdx === 0) {
    return model;
  } else {
    const sortedItemKeys = idxArray.map(idx => parent.subKeys.get(idx));
    // NOTE(review): the splice removes `idxArray.length` items starting at
    // the group's first index, which assumes the selected keys are
    // contiguous siblings — confirm callers guarantee that.
    model = model.updateIn(['topics', parent.key, 'subKeys'], subKeys =>
      subKeys
        .splice(idxArray[0], idxArray.length)
        .splice(idxArray[0] - 1, 0, ...sortedItemKeys)
    );
  }
  return model;
}
function swapDown({
  model,
  topicKeys
}: BaseSheetModelModifierArg): SheetModelModifierResult {
  // Move a group of sibling topics one position down within their parent's
  // subKeys list. No-op when the group already touches the bottom.
  if (topicKeys == null) topicKeys = model.focusOrSelectedKeys;
  const firstKey = topicKeys[0];
  const parent = model.getParentTopic(firstKey);
  const idxArray = [];
  for (const itemKey of topicKeys) {
    const idx = parent.subKeys.indexOf(itemKey);
    // Bail out if the given keys are not all siblings of the first key.
    if (idx === -1) return model;
    idxArray.push(idx);
  }
  idxArray.sort((a, b) => a - b);
  const lastIdx = idxArray[idxArray.length - 1];
  if (lastIdx === parent.subKeys.size - 1) {
    return model;
  } else {
    const sortedItemKeys = idxArray.map(idx => parent.subKeys.get(idx));
    // NOTE(review): assumes the selected keys are contiguous siblings, same
    // as swapUp — confirm callers guarantee that.
    model = model.updateIn(['topics', parent.key, 'subKeys'], subKeys =>
      subKeys
        .splice(idxArray[0], idxArray.length)
        .splice(idxArray[0] + 1, 0, ...sortedItemKeys)
    );
  }
  return model;
}
function addMultiSibling({
  model,
  topicKey,
  contentArray,
  topicArray
}: BaseSheetModelModifierArg & {
  contentArray?: string[];
  topicArray?: Array<Topic>;
}) {
  // Insert several new siblings right after `topicKey`, built from plain
  // content strings; focus ends on the last inserted sibling.
  const topic = model.getTopic(topicKey);
  const parentKey = topic.parentKey;
  const parentTopic = model.getTopic(parentKey);
  const idx = parentTopic.subKeys.indexOf(topicKey);
  if (contentArray) {
    const siblings = contentArray.map(content =>
      Topic.create({
        key: createKey(),
        parentKey,
        content
      })
    );
    const siblingsKeys = siblings.map(s => s.key);
    model = model.withMutations(model => {
      siblings.forEach(sibling => {
        model.update('topics', topics => topics.set(sibling.key, sibling));
      });
      // Splice all new keys in right after the anchor topic.
      model.updateIn(['topics', parentKey, 'subKeys'], subKeys =>
        subKeys.splice(idx + 1, 0, ...siblingsKeys)
      );
    });
    model = focusTopic({
      model,
      topicKey: siblingsKeys[siblingsKeys.length - 1],
      focusMode: FocusMode.EDITING_CONTENT
    });
  } else if (topicArray) {
    // NOTE(review): the topicArray branch is an unimplemented stub — passing
    // ready-made topics currently does nothing (addMultiChild does handle
    // topicArray). Confirm whether this is intentional.
  }
  return model;
}
function addMultiChild({
  model,
  topicKey,
  addAtFront = false,
  contentArray,
  topicArray
}: BaseSheetModelModifierArg & {
  addAtFront?: boolean;
  contentArray?: string[];
  topicArray?: Array<Topic>;
}) {
  // Attach multiple children to `topicKey`, either built from plain content
  // strings or supplied as ready-made Topic records. Focus ends on the last
  // inserted child.
  if (contentArray) {
    topicArray = contentArray.map(content =>
      Topic.create({
        key: createKey(),
        parentKey: topicKey,
        content
      })
    );
  }
  // Robustness fix: the original dereferenced `topicArray` unconditionally
  // and crashed when neither contentArray nor topicArray was provided.
  if (!topicArray || topicArray.length === 0) {
    return model;
  }
  // Only topics that actually declare `topicKey` as parent become subKeys.
  const childKeys = topicArray
    .filter(s => s.parentKey === topicKey)
    .map(s => s.key);
  model = model.withMutations(mutable => {
    topicArray.forEach(topic => {
      mutable.update('topics', topics_ => topics_.set(topic.key, topic));
    });
    mutable.updateIn(['topics', topicKey, 'subKeys'], subKeys =>
      addAtFront ? subKeys.unshift(...childKeys) : subKeys.push(...childKeys)
    );
  });
  model = focusTopic({
    model,
    topicKey: childKeys[childKeys.length - 1],
    focusMode: FocusMode.EDITING_CONTENT
  });
  return model;
}
function addMultiTopics({ model, topics }) {
  // Register a batch of Topic records without touching any subKeys lists.
  return model.withMutations(mutable => {
    topics.forEach(topic => {
      mutable.update('topics', topics_ => topics_.set(topic.key, topic));
    });
  });
}
export const SheetModelModifier = {
addChild,
addSibling,
addMultiTopics,
addMultiChild,
addMultiSibling,
toggleCollapse,
collapseAll,
expandAll,
expandTo,
focusTopic,
topicContentToPlainText,
setFocusMode,
deleteTopic,
deleteTopics,
setTopicBlockData,
setTopicBlockContentData,
deleteTopicBlock,
setStyle,
clearAllCustomStyle,
setConfig,
setTheme,
setLayoutDir,
setEditorRootTopicKey,
setZoomFactor,
startEditingContent,
startEditingDesc,
dragAndDrop,
swapUp,
swapDown
}; | the_stack |
* Remove these grammars bundled in vscode
*/
// Grammar ids stripped from the vscode bundle: injections, derivatives and
// other grammars that are not useful as standalone highlight languages.
export const vscodeGrammarsToRemove = [
  'html-derivative',
  'ignore',
  'MagicRegExp',
  'platform',
  'sassdoc',
  'searchResult',
  'log',
  'cuda-cpp',
  'cshtml',
  'jsdoc.js.injection',
  'jsdoc.ts.injection',
  'md-math',
  'md-math-inline',
  'md-math-block',
  'cpp-grammar-bailout',
  'markdown-latex-combined'
]
/**
 * Rename these grammars bundled in vscode
 * (vscode-internal grammar id -> the language id exposed to users).
 */
export const vscodeGrammarsToRename = {
  'asp-vb-net': 'vb',
  batchfile: 'bat',
  coffeescript: 'coffee',
  'cpp.embedded.macro': 'cpp-macro',
  JavaScriptReact: 'jsx',
  MagicPython: 'python',
  'shell-unix-bash': 'shellscript',
  TypeScriptReact: 'tsx',
  'fortran_free-form': 'fortran',
  'objective-c++': 'objective-cpp',
  perl6: 'raku'
}
/**
 * All language grammar sources on github.com.
 *
 * To add one:
 * - Search `<lang> textmate` on GitHub
 * - Search `<lang>` on VS Code Marketplace
 * - Pick the most recently updated fork that contains the grammar
 * - Add the URL to the end
 * - Run `yarn update:grammars`, examine the changes
 *
 * The grammar id is normalized from the `name` key of the grammar json file.
 * When the grammar provides an undesirable name (or no `name` key), for example `x86 and x86_64 Assembly` at
 * https://github.com/13xforever/x86_64-assembly-vscode/blob/face834a56e416230c2d20939f9fa77c25344865/syntaxes/language-x86_64-assembly.tmLanguage#L13-L14,
 * provide an array like `['asm', '<url>']` to name the language `asm`
 *
 * Entry format: either a bare URL string (language id taken from the grammar
 * file) or a `[languageId, url]` pair to override the id.
 */
export const githubGrammarSources: (string | [string, string])[] = [
  'https://github.com/prisma-labs/vscode-graphql/blob/master/grammars/graphql.json',
  ['haml', 'https://github.com/karuna/haml-vscode/blob/master/syntaxes/haml.json'],
  'https://github.com/James-Yu/LaTeX-Workshop/blob/master/syntax/TeX.tmLanguage.json',
  'https://github.com/James-Yu/LaTeX-Workshop/blob/master/syntax/LaTeX.tmLanguage.json',
  'https://github.com/TheRealSyler/vscode-sass-indented/blob/master/syntaxes/sass.tmLanguage.json',
  'https://github.com/d4rkr00t/language-stylus/blob/master/syntaxes/stylus.json',
  'https://github.com/textmate/toml.tmbundle/blob/master/Syntaxes/TOML.tmLanguage',
  'https://github.com/vuejs/vetur/blob/master/syntaxes/vue-generated.json',
  'https://github.com/vuejs/vetur/blob/master/syntaxes/vue-html.tmLanguage.json',
  'https://github.com/vuejs/vetur/blob/master/syntaxes/vue-postcss.json',
  [
    'asm',
    'https://github.com/13xforever/x86_64-assembly-vscode/blob/master/syntaxes/language-x86_64-assembly.tmLanguage'
  ],
  'https://github.com/mathworks/MATLAB-Language-grammar/blob/40d9a0cd3b628f80cdcf948bbe1747a527ed5dd5/Matlab.tmbundle/Syntaxes/MATLAB.tmLanguage',
  ['sas', 'https://github.com/rpardee/sas/blob/master/syntaxes/sas.tmLanguage'],
  'https://github.com/Pure-D/code-d/blob/master/syntaxes/d.json',
  'https://github.com/Dart-Code/Dart-Code/blob/master/syntaxes/dart.json',
  ['plsql', 'https://github.com/zabel-xyz/plsql-language/blob/master/syntaxes/plsql.tmLanguage'],
  'https://github.com/textmate/logo.tmbundle/blob/master/Syntaxes/Logo.tmLanguage',
  'https://github.com/alefragnani/vscode-language-pascal/blob/master/syntaxes/pascal.tmLanguage',
  'https://github.com/spgennard/vscode_cobol/blob/master/syntaxes/COBOL.tmLanguage.json',
  'https://github.com/mathiasfrohlich/vscode-kotlin/blob/master/syntaxes/Kotlin.tmLanguage',
  'https://github.com/scala/vscode-scala-syntax/blob/master/syntaxes/Scala.tmLanguage.json',
  'https://github.com/pvl/abap.tmbundle/blob/master/Syntaxes/ABAP.tmLanguage',
  'https://github.com/sjhuangx/vscode-scheme/blob/master/syntaxes/scheme.tmLanguage',
  [
    'prolog',
    'https://github.com/arthwang/vsc-prolog/blob/master/syntaxes/prolog.swi.tmLanguage.json'
  ],
  ['cue', 'https://github.com/cue-sh/vscode-cue/blob/master/syntaxes/cue.tmLanguage.json'],
  'https://github.com/AdaCore/ada_language_server/blob/master/integration/vscode/ada/advanced/ada.tmLanguage.json',
  'https://github.com/mattn/vscode-lisp/blob/master/syntaxes/Lisp.tmLanguage',
  'https://github.com/forcedotcom/apex-tmLanguage/blob/main/grammars/apex.tmLanguage',
  'https://github.com/octref/language-haskell/blob/master/syntaxes/haskell.json',
  'https://github.com/wholroyd/vscode-hcl/blob/develop/syntaxes/hcl.json',
  'https://github.com/slackhq/vscode-hack/blob/master/syntaxes/hack.json',
  'https://github.com/luggage66/vscode-awk/blob/master/syntaxes/awk.tmLanguage',
  'https://github.com/BowlerHatLLC/vscode-as3mxml/blob/master/distribution/src/assembly/syntaxes/AS3.tmLanguage',
  'https://github.com/sleutho/tcl/blob/master/syntaxes/tcl.tmLanguage',
  'https://github.com/reasonml-editor/vscode-reasonml/blob/master/syntaxes/ocaml.json',
  'https://github.com/dunstontc/viml/blob/master/syntaxes/viml.tmLanguage.json',
  'https://github.com/octref/puppet-vscode/blob/main/syntaxes/puppet.tmLanguage',
  'https://github.com/heptio/vscode-jsonnet/blob/master/syntaxes/jsonnet.tmLanguage.json',
  'https://github.com/leocamello/vscode-smalltalk/blob/master/syntaxes/smalltalk.tmLanguage.json',
  'https://github.com/crystal-lang-tools/vscode-crystal-lang/blob/master/syntaxes/crystal.json',
  ['wasm', 'https://github.com/wasmerio/vscode-wasm/blob/master/syntaxes/wat.json'],
  'https://github.com/bbenoist/vscode-nix/blob/master/syntaxes/nix.tmLanguage',
  'https://github.com/elm-tooling/elm-language-client-vscode/blob/master/syntaxes/elm-syntax.json',
  'https://github.com/nwolverson/vscode-language-purescript/blob/master/syntaxes/purescript.json',
  [
    'svelte',
    'https://github.com/sveltejs/language-tools/blob/master/packages/svelte-vscode/syntaxes/svelte.tmLanguage.src.yaml'
  ],
  'https://github.com/samuelcolvin/jinjahtml-vscode/blob/master/syntaxes/jinja.tmLanguage.json',
  'https://github.com/samuelcolvin/jinjahtml-vscode/blob/master/syntaxes/jinja-html.tmLanguage.json',
  'https://github.com/wenyan-lang/highlight/blob/master/wenyan.tmLanguage.json',
  'https://github.com/elixir-editors/elixir-tmbundle/blob/master/Syntaxes/Elixir.tmLanguage',
  'https://github.com/erlang-ls/grammar/blob/main/Erlang.plist',
  'https://github.com/textmate/applescript.tmbundle/blob/master/Syntaxes/AppleScript.tmLanguage',
  [
    'erb',
    'https://github.com/textmate/ruby.tmbundle/blob/master/Syntaxes/HTML%20(Ruby%20-%20ERB).tmLanguage'
  ],
  'https://github.com/textmate/ssh-config.tmbundle/blob/master/Syntaxes/SSH-Config.tmLanguage',
  [
    'razor',
    'https://github.com/dotnet/aspnetcore-tooling/blob/master/src/Razor/src/Microsoft.AspNetCore.Razor.VSCode.Extension/syntaxes/aspnetcorerazor.tmLanguage.json'
  ],
  'https://github.com/MarioSchwalbe/vscode-gnuplot/blob/master/syntaxes/gnuplot.tmLanguage',
  'https://github.com/silvenon/vscode-mdx/blob/master/syntaxes/mdx.tmLanguage.json',
  'https://github.com/kimmolinna/vscode-apl-language/blob/master/syntaxes/apl.tmLanguage.json',
  ['apache', 'https://github.com/colinta/ApacheConf.tmLanguage/blob/master/ApacheConf.tmLanguage'],
  'https://github.com/mshr-h/vscode-verilog-hdl-support/blob/master/syntaxes/verilog.tmLanguage',
  [
    'system-verilog',
    'https://github.com/mshr-h/vscode-verilog-hdl-support/blob/master/syntaxes/systemverilog.tmLanguage'
  ],
  'https://github.com/jonasjj/awesome-vhdl/blob/master/syntaxes/vhdl.tmLanguage',
  'https://github.com/juanfranblanco/vscode-solidity/blob/master/syntaxes/solidity.json',
  [
    'riscv',
    'https://github.com/zhuanhao-wu/vscode-riscv-support/blob/master/syntaxes/riscv.tmLanguage'
  ],
  [
    'gherkin',
    'https://github.com/alexkrechik/VSCucumberAutoComplete/blob/master/gclient/syntaxes/feature.tmLanguage'
  ],
  'https://github.com/hangxingliu/vscode-nginx-conf-hint/blob/master/src/syntax/nginx.tmLanguage',
  'https://github.com/pragmagic/vscode-nim/blob/master/syntaxes/nim.json',
  [
    'twig',
    'https://github.com/nalabdou/Symfony-code-snippets/blob/master/syntaxes/twig.tmLanguage'
  ],
  'https://github.com/stardog-union/stardog-vsc/blob/master/stardog-rdf-grammars/syntaxes/turtle.tmLanguage.json',
  'https://github.com/stardog-union/stardog-vsc/blob/master/stardog-rdf-grammars/syntaxes/sparql.tmLanguage.json',
  'https://github.com/prisma/language-tools/blob/master/packages/vscode/syntaxes/prisma.tmLanguage.json',
  'https://github.com/StoneCypher/sublime-jssm/blob/master/jssm.tmLanguage',
  'https://github.com/gbasood/vscode-atomic-dreams/blob/master/syntaxes/dm.tmLanguage.json',
  'https://github.com/bmalehorn/vscode-fish/blob/master/syntaxes/fish.tmLanguage.json',
  'https://github.com/withastro/astro/blob/main/tools/vscode/syntaxes/astro.tmLanguage.json',
  ['csharp', 'https://github.com/dotnet/csharp-tmLanguage/blob/main/grammars/csharp.tmLanguage'],
  [
    'ballerina',
    'https://github.com/ballerina-platform/ballerina-grammar/blob/master/syntaxes/ballerina.tmLanguage'
  ],
  'https://github.com/underlay/vscode-tasl/blob/main/syntaxes/tasl.tmLanguage.json',
  ['codeql', 'https://github.com/github/vscode-codeql/blob/main/syntaxes/ql.tmLanguage.json'],
  ['bicep', 'https://github.com/Azure/bicep/blob/main/src/textmate/bicep.tmlanguage'],
  [
    'berry',
    'https://github.com/berry-lang/berry/blob/master/tools/plugins/vscode/skiars.berry-0.1.0/syntaxes/berry.json'
  ]
]
/**
 * Aliases to export, so one can use `md` as well as `markdown` to highlight
 * markdown content (canonical language id -> list of accepted aliases).
 */
export const languageAliases = {
  bat: ['batch'],
  berry: ['be'],
  clojure: ['clj'],
  csharp: ['c#'],
  fsharp: ['f#'],
  handlebars: ['hbs'],
  javascript: ['js'],
  jssm: ['fsl'],
  make: ['makefile'],
  markdown: ['md'],
  'objective-c': ['objc'],
  powershell: ['ps', 'ps1'],
  pug: ['jade'],
  python: ['py'],
  raku: ['perl6'],
  ruby: ['rb'],
  rust: ['rs'],
  'html-ruby-erb': ['erb'],
  shaderlab: ['shader'],
  shellscript: ['shell', 'bash', 'sh', 'zsh'],
  stylus: ['styl'],
  typescript: ['ts'],
  // NOTE(review): 'cmd' aliased to vb looks surprising (cmd usually means
  // batch scripts) — confirm this mapping is intentional before relying on it.
  vb: ['cmd'],
  viml: ['vim', 'vimscript'],
  wenyan: ['文言'],
  codeql: ['ql']
}
/**
 * Embedded languages excluded from exporting
 * Users should use languages that embed them
 */
export const embeddedLanguagesToExclude = [
  // `jinja-html` instead
  'jinja',
  // `php` instead
  'php-html',
  // embedded by `cpp`
  'cpp-macro'
]
import * as fs from 'async-file'
import { Subject } from 'await-notify'
import { assert } from 'console'
import * as net from 'net'
import { homedir } from 'os'
import * as path from 'path'
import { uuid } from 'uuidv4'
import * as vscode from 'vscode'
import * as rpc from 'vscode-jsonrpc/node'
import * as vslc from 'vscode-languageclient/node'
import { onSetLanguageClient } from '../extension'
import * as jlpkgenv from '../jlpkgenv'
import { switchEnvToPath } from '../jlpkgenv'
import { JuliaExecutablesFeature } from '../juliaexepath'
import * as telemetry from '../telemetry'
import { generatePipeName, getVersionedParamsAtPosition, inferJuliaNumThreads, registerCommand, setContext } from '../utils'
import { VersionedTextDocumentPositionParams } from './misc'
import * as modules from './modules'
import * as plots from './plots'
import { showProfileResult, showProfileResultFile } from './profiler'
import * as results from './results'
import { Frame, openFile } from './results'
// Extension-wide singletons for the Julia REPL integration.
let g_context: vscode.ExtensionContext = null
let g_languageClient: vslc.LanguageClient = null
// Provider reporting which modules/functions the debugger runs compiled.
let g_compiledProvider = null
// The integrated terminal hosting the Julia REPL (null until first start).
let g_terminal: vscode.Terminal = null
// JSON-RPC connection to the Julia process; undefined until it connects back.
export let g_connection: rpc.MessageConnection = undefined
let g_juliaExecutablesFeature: JuliaExecutablesFeature
function startREPLCommand() {
    // Command handler: record telemetry, then launch (or reveal) the REPL
    // with focus moved to the terminal.
    telemetry.traceEvent('command-startrepl')
    startREPL(false, true)
}
function is_remote_env(): boolean {
    // vscode.env.remoteName is undefined for local windows and set for
    // remote setups (SSH, WSL, containers, ...).
    return vscode.env.remoteName !== undefined
}
function get_editor(): string {
    // Value for JULIA_EDITOR: the user's explicit setting wins, otherwise
    // pick a CLI launcher matching the current VS Code flavor.
    const configured: string | null = vscode.workspace.getConfiguration('julia').get('editor')
    if (configured) {
        return configured
    }
    if (is_remote_env()) {
        // Remote setups: OSS builds use code-server, otherwise launch via
        // the quoted executable path.
        return vscode.env.appName === 'Code - OSS'
            ? 'code-server'
            : `"${process.execPath}"`
    }
    return vscode.env.appName.includes('Insiders') ? 'code-insiders' : 'code'
}
function isConnected() {
    // A truthy g_connection means the Julia process connected back.
    return !!g_connection
}
async function startREPL(preserveFocus: boolean, showTerminal: boolean = true) {
    // Start (or reveal) the integrated Julia REPL terminal and wait until
    // the Julia process connects back over the message-server pipe.
    if (isConnected()) {
        if (g_terminal && showTerminal) {
            g_terminal.show(preserveFocus)
        }
        return
    }
    const config = vscode.workspace.getConfiguration('julia')
    if (g_terminal === null) {
        const pipename = generatePipeName(uuid(), 'vsc-jl-repl')
        const startupPath = path.join(g_context.extensionPath, 'scripts', 'terminalserver', 'terminalserver.jl')
        // remember to change ../../scripts/terminalserver/terminalserver.jl when adding/removing args here:
        function getArgs() {
            const jlarg2 = [startupPath, pipename, telemetry.getCrashReportingPipename()]
            jlarg2.push(`USE_REVISE=${config.get('useRevise')}`)
            jlarg2.push(`USE_PLOTPANE=${config.get('usePlotPane')}`)
            jlarg2.push(`USE_PROGRESS=${config.get('useProgressFrontend')}`)
            jlarg2.push(`DEBUG_MODE=${Boolean(process.env.DEBUG_MODE)}`)
            return jlarg2
        }
        // Environment for the spawned Julia process.
        const env = {
            JULIA_EDITOR: get_editor(),
            JULIA_NUM_THREADS: inferJuliaNumThreads()
        }
        const pkgServer: string = config.get('packageServer')
        if (pkgServer.length !== 0) {
            env['JULIA_PKG_SERVER'] = pkgServer
        }
        // Start listening before launching Julia so the connection can't race.
        const juliaIsConnectedPromise = startREPLMsgServer(pipename)
        const juliaExecutable = await g_juliaExecutablesFeature.getActiveJuliaExecutableAsync()
        let jlarg1: string[]
        const pkgenvpath = await jlpkgenv.getAbsEnvPath()
        if (pkgenvpath === null) {
            jlarg1 = ['-i', '--banner=no'].concat(config.get('additionalArgs'))
        } else {
            const env_file_paths = await jlpkgenv.getProjectFilePaths(pkgenvpath)
            let sysImageArgs = []
            // Only use a custom sysimage when it is newer than the manifest;
            // otherwise warn and fall back to the plain environment.
            if (config.get('useCustomSysimage') && env_file_paths.sysimage_path && env_file_paths.project_toml_path && env_file_paths.manifest_toml_path) {
                const date_sysimage = await fs.stat(env_file_paths.sysimage_path)
                const date_manifest = await fs.stat(env_file_paths.manifest_toml_path)
                if (date_sysimage.mtime > date_manifest.mtime) {
                    sysImageArgs = ['-J', env_file_paths.sysimage_path]
                }
                else {
                    vscode.window.showWarningMessage('Julia sysimage for this environment is out-of-date and not used for REPL.')
                }
            }
            jlarg1 = ['-i', '--banner=no', `--project=${pkgenvpath}`].concat(sysImageArgs).concat(config.get('additionalArgs'))
        }
        if (Boolean(config.get('persistentSession.enabled'))) {
            // Persistent session: run Julia inside tmux so the process can
            // survive window reloads, reattaching when the session exists.
            const shellPath: string = config.get('persistentSession.shell')
            const connectJuliaCode = juliaConnector(pipename)
            const sessionName = config.get('persistentSession.tmuxSessionName')
            const tmuxArgs = [
                <string>config.get('persistentSession.shellExecutionArgument'),
                // create a new tmux session, set remain-on-exit to true, and attach; if the session already exists we just attach to the existing session
                `tmux new -d -s ${sessionName} ${juliaExecutable.file} ${[...juliaExecutable.args, ...jlarg1, ...getArgs()].join(' ')} && tmux set -q remain-on-exit && tmux attach -t ${sessionName} ||
tmux send-keys -t ${sessionName}.left ^A ^K ^H '${connectJuliaCode}' ENTER && tmux attach -t ${sessionName}`
            ]
            g_terminal = vscode.window.createTerminal({
                name: 'Julia REPL',
                shellPath: shellPath,
                shellArgs: tmuxArgs,
                env: env
            })
        } else {
            g_terminal = vscode.window.createTerminal({
                name: 'Julia REPL',
                shellPath: juliaExecutable.file,
                shellArgs: [...juliaExecutable.args, ...jlarg1, ...getArgs()],
                env: env
            })
        }
        g_terminal.show(preserveFocus)
        // Block until the Julia side has connected to the pipe server.
        await juliaIsConnectedPromise.wait()
    } else if (showTerminal) {
        g_terminal.show(preserveFocus)
    }
}
function juliaConnector(pipename: string, start = false) {
    // Julia source that (re)connects a running session to this extension.
    const connect = `VSCodeServer.serve(raw"${pipename}"; is_dev = "DEBUG_MODE=true" in Base.ARGS, crashreporting_pipename = raw"${telemetry.getCrashReportingPipename()}");nothing # re-establishing connection with VSCode`
    if (!start) {
        return connect
    }
    // When starting fresh, load VSCodeServer from the bundled packages first.
    const load = `pushfirst!(LOAD_PATH, raw"${path.join(g_context.extensionPath, 'scripts', 'packages')}");using VSCodeServer;popfirst!(LOAD_PATH);`
    return load + connect
}
async function connectREPL() {
    // Connect to an externally started Julia session: the user copies the
    // generated connection snippet and pastes it into their own REPL.
    const pipename = generatePipeName(uuid(), 'vsc-jl-repl')
    const juliaIsConnectedPromise = startREPLMsgServer(pipename)
    const connectJuliaCode = juliaConnector(pipename, true)
    const click = await vscode.window.showInformationMessage('Start a Julia session, and execute the code copied into your clipboard by the button below into it.', 'Copy code')
    if (click !== 'Copy code') {
        return
    }
    vscode.env.clipboard.writeText(connectJuliaCode)
    try {
        await juliaIsConnectedPromise.wait()
        vscode.window.showInformationMessage('Successfully connected to external Julia REPL.')
    } catch (err) {
        vscode.window.showErrorMessage('Failed to connect to external Julia REPL.')
    }
}
function killREPL() {
    // Tear down the RPC connection (if any) and dispose the REPL terminal.
    if (isConnected()) {
        g_connection.end()
    }
    if (g_terminal) {
        g_terminal.dispose()
    }
}
// Builds the attach-style debug configuration used by both REPL debug entry
// points; the two callers differed only in `stopOnEntry`, so the duplicated
// object literal is factored out here.
function buildReplDebugConfig(params: DebugLaunchParams, stopOnEntry: boolean) {
    return {
        type: 'julia',
        request: 'attach',
        name: 'Julia REPL',
        code: params.code,
        file: params.filename,
        stopOnEntry: stopOnEntry,
        compiledModulesOrFunctions: g_compiledProvider.getCompiledItems(),
        compiledMode: g_compiledProvider.compiledMode
    }
}
// Runs REPL-submitted code under the debugger without pausing at the start.
function debuggerRun(params: DebugLaunchParams) {
    vscode.debug.startDebugging(undefined, buildReplDebugConfig(params, false))
}
// Runs REPL-submitted code under the debugger, pausing on the first statement.
function debuggerEnter(params: DebugLaunchParams) {
    vscode.debug.startDebugging(undefined, buildReplDebugConfig(params, true))
}
// Reply shape for a `repl/runcode` request.
interface ReturnResult {
    inline: string,                 // short rendering shown inline next to the code
    all: string,                    // full rendering (used for hovers / results pane)
    stackframe: null | Array<Frame> // stack trace when evaluation raised; null on success
}
// Request sent to the Julia process to evaluate a chunk of code.
const requestTypeReplRunCode = new rpc.RequestType<{
    filename: string,
    line: number,
    column: number,
    code: string,
    mod: string,                // Julia module to evaluate in, e.g. 'Main'
    showCodeInREPL: boolean,
    showResultInREPL: boolean,
    showErrorInREPL: boolean,
    softscope: boolean          // use REPL soft-scope rules for top-level code
}, ReturnResult, void>('repl/runcode')
// Payload the Julia process sends when asking VS Code to debug code.
interface DebugLaunchParams {
    code: string,
    filename: string
}
// Notification types exchanged with the Julia process over the REPL connection.
export const notifyTypeDisplay = new rpc.NotificationType<{ kind: string, data: any }>('display')
const notifyTypeDebuggerEnter = new rpc.NotificationType<DebugLaunchParams>('debugger/enter')
const notifyTypeDebuggerRun = new rpc.NotificationType<DebugLaunchParams>('debugger/run')
const notifyTypeReplStartDebugger = new rpc.NotificationType<{ debugPipename: string }>('repl/startdebugger')
const notifyTypeReplStartEval = new rpc.NotificationType<void>('repl/starteval')
export const notifyTypeReplFinishEval = new rpc.NotificationType<void>('repl/finisheval')
export const notifyTypeReplShowInGrid = new rpc.NotificationType<{ code: string }>('repl/showingrid')
const notifyTypeShowProfilerResult = new rpc.NotificationType<{ content: string }>('repl/showprofileresult')
const notifyTypeShowProfilerResultFile = new rpc.NotificationType<{ filename: string }>('repl/showprofileresult_file')
// Progress report pushed by the Julia process.
interface Progress {
    id: { value: number },  // identifies one progress bar across updates
    name: string,
    fraction: number,       // 0..1; NaN / out-of-range means indeterminate
    done: Boolean
}
const notifyTypeProgress = new rpc.NotificationType<Progress>('repl/updateProgress')
// REPL lifecycle events other extension components can subscribe to.
const g_onInit = new vscode.EventEmitter<rpc.MessageConnection>()
export const onInit = g_onInit.event
const g_onExit = new vscode.EventEmitter<Boolean>()
export const onExit = g_onExit.event
const g_onStartEval = new vscode.EventEmitter<null>()
export const onStartEval = g_onStartEval.event
const g_onFinishEval = new vscode.EventEmitter<null>()
export const onFinishEval = g_onFinishEval.event
// code execution start
// Starts a named-pipe/socket server the Julia process connects back to.
// The returned Subject is notified once a client has connected and the RPC
// connection is listening. Only one connection is expected at a time: a new
// socket replaces g_connection, and the server closes itself when the socket
// closes.
function startREPLMsgServer(pipename: string) {
    const connected = new Subject()
    // drop any stale connection reference before listening again
    if (g_connection) {
        g_connection = undefined
    }
    const server = net.createServer((socket: net.Socket) => {
        socket.on('close', hadError => {
            g_onExit.fire(hadError)
            g_connection = undefined
            server.close()
        })
        g_connection = rpc.createMessageConnection(
            new rpc.StreamMessageReader(socket),
            new rpc.StreamMessageWriter(socket)
        )
        g_connection.listen()
        g_onInit.fire(g_connection)
        connected.notify()
    })
    server.listen(pipename)
    return connected
}
// Maps progress id -> { progress, last_fraction, started, resolve } for every
// progress notification currently shown.
const g_progress_dict = {}
// Creates or updates a window progress notification for a Julia-side progress
// report. Cancelling the notification interrupts the running code.
async function updateProgress(progress: Progress) {
    if (g_progress_dict[progress.id.value]) {
        // known progress bar: report the delta since the previous fraction
        const p = g_progress_dict[progress.id.value]
        const increment = progress.done ? 100 : (progress.fraction - p.last_fraction) * 100
        p.progress.report({
            increment: increment,
            message: progressMessage(progress, p.started)
        })
        p.last_fraction = progress.fraction
        if (progress.done) {
            // resolving the stored promise closes the notification
            p.resolve()
            delete g_progress_dict[progress.id.value]
        }
    } else {
        // first report for this id: open a cancellable notification that
        // stays alive until `resolve` is called above
        vscode.window.withProgress({
            location: vscode.ProgressLocation.Window,
            title: 'Julia',
            cancellable: true
        }, (prog, token) => {
            return new Promise(resolve => {
                g_progress_dict[progress.id.value] = {
                    progress: prog,
                    last_fraction: progress.fraction,
                    started: new Date(),
                    resolve: resolve,
                }
                token.onCancellationRequested(ev => {
                    interrupt()
                })
                prog.report({
                    message: progressMessage(progress)
                })
            })
        })
    }
}
// Formats a status-bar message for a progress report: the task name,
// optionally followed by a percentage and — when a start timestamp is known —
// an estimated time remaining. An invalid fraction yields the bare name.
function progressMessage(prog: Progress, started = null) {
    let message = prog.name
    const wrapInParens = message.trim().length > 0
    const fractionIsValid = !isNaN(prog.fraction) && 0 <= prog.fraction && prog.fraction <= 1
    if (fractionIsValid) {
        let details = `${(prog.fraction * 100).toFixed(1)}%`
        if (started !== null) {
            const elapsed = ((new Date()).valueOf() - started) / 1000
            const remaining = (1 / prog.fraction - 1) * elapsed
            details += ` - ${formattedTimePeriod(remaining)} remaining`
        }
        message += wrapInParens ? ` (${details})` : details
    }
    return message
}
// Renders a duration in seconds as a compact "Xh, Ymin, Zs" string. Zero
// leading units are omitted (note: zero minutes are omitted even when hours
// are shown, matching the original behavior).
function formattedTimePeriod(t) {
    const hours = Math.floor(t / 3600)
    const minutes = Math.floor((t / 60) % 60)
    const seconds = Math.floor(t % 60)
    const pieces = []
    if (hours > 0) {
        pieces.push(`${hours}h`)
    }
    if (minutes > 0) {
        pieces.push(`${minutes}min`)
    }
    pieces.push(`${seconds}s`)
    return pieces.join(', ')
}
// Resolves (closes) and removes every outstanding progress notification.
function clearProgress() {
    Object.keys(g_progress_dict).forEach(id => {
        g_progress_dict[id].resolve()
        delete g_progress_dict[id]
    })
}
function display(params: { kind: string, data: any }) {
if (params.kind === 'application/vnd.julia-vscode.diagnostics') {
displayDiagnostics(params.data)
} else {
plots.displayPlot(params)
}
}
// A single runtime diagnostic reported by the Julia process. The position is
// given either as a full `range` ([[startLine, startCol], [endLine, endCol]])
// or a bare `line`; lines appear to be 1-based (the consumer subtracts 1)
// while columns are used as-is — TODO confirm against the Julia side.
interface diagnosticData {
    msg: string,
    path: string,
    line?: number,
    range?: number[][],
    severity: number,   // vscode.DiagnosticSeverity value; Warning when undefined
    relatedInformation?: {
        msg: string,
        path: string,
        line?: number,
        range?: number[][]
    }[]
}
// One DiagnosticCollection per diagnostics "source" reported by the runtime.
const g_trace_diagnostics: Map<string, vscode.DiagnosticCollection> = new Map()
// Renders runtime diagnostics from the Julia process into VS Code's problems
// UI. An empty item list clears and disposes the source's collection.
function displayDiagnostics(data: { source: string, items: diagnosticData[] }) {
    const source = data.source
    if (g_trace_diagnostics.has(source)) {
        // reuse the existing collection, dropping stale entries first
        g_trace_diagnostics.get(source).clear()
    } else {
        g_trace_diagnostics.set(source, vscode.languages.createDiagnosticCollection('Julia Runtime Diagnostics: ' + source))
    }
    const items = data.items
    if (items.length === 0) {
        return _clearDiagnostic(source)
    }
    const diagnostics = items.map((frame): [vscode.Uri, vscode.Diagnostic[]] => {
        // -1 converts reported lines to 0-based; a bare `line` marks the
        // whole line via a very large end column
        const range = frame.range ?
            new vscode.Range(frame.range[0][0] - 1, frame.range[0][1], frame.range[1][0] - 1, frame.range[1][1]) :
            new vscode.Range(frame.line - 1, 0, frame.line - 1, 99999)
        const diagnostic = new vscode.Diagnostic(
            range,
            frame.msg,
            frame.severity === undefined ? vscode.DiagnosticSeverity.Warning : frame.severity
        )
        if (frame.relatedInformation) {
            diagnostic.relatedInformation = frame.relatedInformation.map(stackframe => {
                const range = stackframe.range ?
                    new vscode.Range(stackframe.range[0][0] - 1, stackframe.range[0][1], stackframe.range[1][0] - 1, stackframe.range[1][1]) :
                    new vscode.Range(stackframe.line - 1, 0, stackframe.line - 1, 99999)
                return new vscode.DiagnosticRelatedInformation(
                    new vscode.Location(vscode.Uri.file(stackframe.path), range),
                    stackframe.msg
                )
            })
        }
        diagnostic.source = source
        return [
            vscode.Uri.file(frame.path),
            [
                diagnostic
            ]
        ]
    })
    g_trace_diagnostics.get(source).set(diagnostics)
}
// Removes runtime diagnostics for every registered source.
function clearDiagnostics() {
    for (const source of [...g_trace_diagnostics.keys()]) {
        _clearDiagnostic(source)
    }
}
// Asks the user which diagnostics source to clear, then clears it.
function clearDiagnosticsByProvider() {
    const sources = [...g_trace_diagnostics.keys()]
    vscode.window.showQuickPick(sources, {
        // canPickMany: true, // not work nicely with keyboard shortcuts
        title: 'Select sources of diagnostics to filter them out.'
    }).then(source => {
        if (source) {
            _clearDiagnostic(source)
        }
    })
}
// Clears, disposes, and forgets the diagnostic collection for one source.
function _clearDiagnostic(source: string) {
    const collection = g_trace_diagnostics.get(source)
    collection.clear()
    collection.dispose()
    g_trace_diagnostics.delete(source)
}
// Runs a whole file in the REPL. With a URI argument the file is read from
// disk and evaluated in Main; without one, the active editor's content is
// evaluated in the module the document's first position belongs to.
async function executeFile(uri?: vscode.Uri | string) {
    telemetry.traceEvent('command-executeFile')
    const editor = vscode.window.activeTextEditor
    await startREPL(true, false)
    let module = 'Main'
    let path = ''
    let code = ''
    // commands may hand us a plain string; normalize to a Uri
    if (uri && !(uri instanceof vscode.Uri)) {
        uri = vscode.Uri.parse(uri)
    }
    if (uri && uri instanceof vscode.Uri) {
        path = uri.fsPath
        const readBytes = await vscode.workspace.fs.readFile(uri)
        code = Buffer.from(readBytes).toString('utf8')
    } else {
        if (!editor) {
            return
        }
        path = editor.document.fileName
        code = editor.document.getText()
        const pos = editor.document.validatePosition(new vscode.Position(0, 1)) // xref: https://github.com/julia-vscode/julia-vscode/issues/1500
        module = await modules.getModuleForEditor(editor.document, pos)
    }
    await g_connection.sendRequest(
        requestTypeReplRunCode,
        {
            filename: path,
            line: 0,
            column: 0,
            mod: module,
            code: code,
            showCodeInREPL: false,
            showResultInREPL: true,
            showErrorInREPL: true,
            softscope: false
        }
    )
}
// Asks the language server for the extent of the code block at the given
// position. The LS reply appears to be [blockStart, blockEnd, nextBlockPos];
// falls back to zero positions when the LS is unavailable or errors.
async function getBlockRange(params: VersionedTextDocumentPositionParams) {
    const zeroPos = new vscode.Position(0, 0)
    const zeroReturn = [zeroPos, zeroPos, params.position]
    if (g_languageClient === null) {
        vscode.window.showErrorMessage('No LS running or start. Check your settings.')
        return zeroReturn
    }
    await g_languageClient.onReady()
    try {
        return await g_languageClient.sendRequest('julia/getCurrentBlockRange', params)
    } catch (err) {
        if (err.message === 'Language client is not ready yet') {
            vscode.window.showErrorMessage(err.message)
        } else {
            console.error(err)
            vscode.window.showErrorMessage('Error while communicating with the LS. Check Outputs > Julia Language Server for additional information.')
        }
        return zeroReturn
    }
}
// Selects the Julia code block containing the cursor, as reported by the
// language server.
async function selectJuliaBlock() {
    telemetry.traceEvent('command-selectCodeBlock')
    const editor = vscode.window.activeTextEditor
    const position = editor.document.validatePosition(editor.selection.start)
    const [blockStart, blockEnd] = await getBlockRange(getVersionedParamsAtPosition(editor.document, position))
    const startPos = new vscode.Position(blockStart.line, blockStart.character)
    const endPos = new vscode.Position(blockEnd.line, blockEnd.character)
    validateMoveAndReveal(editor, startPos, endPos)
}
// Lines that start a new "cell": `##` (but not `###`), or `#%%` / `# %%`.
const g_cellDelimiters = [
    /^##(?!#)/,
    /^#(\s?)%%/
]
// True when the given line of text marks a cell border.
function isCellBorder(s: string) {
    for (const delimiter of g_cellDelimiters) {
        if (delimiter.test(s)) {
            return true
        }
    }
    return false
}
// Scans from `line_num` in `direction` (+1 down, -1 up) until a cell border
// or the edge of the document is reached. Returns the stopping line number,
// which is -1 or doc.lineCount when no border was found.
function _nextCellBorder(doc, line_num: number, direction: number) {
    assert(direction === 1 || direction === -1)
    let current = line_num
    while (0 <= current && current < doc.lineCount) {
        if (isCellBorder(doc.lineAt(current).text)) {
            return current
        }
        current += direction
    }
    return current
}
const nextCellBorder = (doc, line_num) => _nextCellBorder(doc, line_num, +1)
const prevCellBorder = (doc, line_num) => _nextCellBorder(doc, line_num, -1)
// Clamps both positions to the document, applies them as the new selection,
// and scrolls the editor so the selection is visible.
function validateMoveAndReveal(editor: vscode.TextEditor, startpos: vscode.Position, endpos: vscode.Position) {
    const doc = editor.document
    const start = doc.validatePosition(startpos)
    const end = doc.validatePosition(endpos)
    editor.selection = new vscode.Selection(start, end)
    editor.revealRange(new vscode.Range(start, end))
}
// Moves the cursor to the first line after the next cell border.
async function moveCellDown() {
    telemetry.traceEvent('command-moveCellDown')
    const editor = vscode.window.activeTextEditor
    if (!editor) {
        return
    }
    const currentLine = editor.selection.active.line
    const target = new vscode.Position(nextCellBorder(editor.document, currentLine + 1) + 1, 0)
    validateMoveAndReveal(editor, target, target)
}
// Moves the cursor to the line just above the previous cell border (clamped
// to the top of the document).
async function moveCellUp() {
    telemetry.traceEvent('command-moveCellUp')
    const editor = vscode.window.activeTextEditor
    if (!editor) {
        return
    }
    const currentLine = editor.selection.active.line
    const target = new vscode.Position(Math.max(0, prevCellBorder(editor.document, currentLine) - 1), 0)
    validateMoveAndReveal(editor, target, target)
}
// Computes the range of the cell around the cursor: from the line after the
// previous cell border down to the line before the next one (both positions
// clamped to the document).
function currentCellRange(editor: vscode.TextEditor) {
    const doc = editor.document
    const currline = editor.selection.active.line
    const startline = prevCellBorder(doc, currline) + 1
    const endline = nextCellBorder(doc, currline + 1) - 1
    const startpos = doc.validatePosition(new vscode.Position(startline, 0))
    const endpos = doc.validatePosition(new vscode.Position(endline, doc.lineAt(endline).text.length))
    return new vscode.Range(startpos, endpos)
}
// Evaluates the cell the cursor is in. When `shouldMove` is set and the user
// has not changed the selection in the meantime, the cursor jumps past the
// cell before evaluation starts.
async function executeCell(shouldMove: boolean = false) {
    telemetry.traceEvent('command-executeCell')
    const ed = vscode.window.activeTextEditor
    if (ed === undefined) {
        return
    }
    const doc = ed.document
    const selection = ed.selection
    const cellrange = currentCellRange(ed)
    const code = doc.getText(cellrange)
    const module: string = await modules.getModuleForEditor(ed.document, cellrange.start)
    await startREPL(true, false)
    // only move if the selection is still the one captured above
    if (shouldMove && ed.selection === selection) {
        const nextpos = new vscode.Position(cellrange.end.line + 2, 0)
        validateMoveAndReveal(ed, nextpos, nextpos)
    }
    await evaluate(ed, cellrange, code, module)
}
// Evaluates, for every selection in the active editor, either the selected
// text or (for an empty selection) the LS-reported code block around the
// cursor, briefly highlighting each evaluated range. With `shouldMove`, the
// cursor advances to the next block after a single empty selection.
async function evaluateBlockOrSelection(shouldMove: boolean = false) {
    telemetry.traceEvent('command-executeCodeBlockOrSelection')
    const editor = vscode.window.activeTextEditor
    if (editor === undefined) {
        return
    }
    // snapshot: the user may change selections while we await below
    const selections = editor.selections.slice()
    await startREPL(true, false)
    for (const selection of selections) {
        let range: vscode.Range = null
        let nextBlock: vscode.Position = null
        const startpos: vscode.Position = editor.document.validatePosition(new vscode.Position(selection.start.line, selection.start.character))
        const module: string = await modules.getModuleForEditor(editor.document, startpos)
        if (selection.isEmpty) {
            // no selection: ask the LS for the enclosing block and where the
            // next one starts
            const currentBlock = await getBlockRange(getVersionedParamsAtPosition(editor.document, startpos))
            const blockStartPos = editor.document.validatePosition(new vscode.Position(currentBlock[0].line, currentBlock[0].character))
            const lineEndPos = editor.document.validatePosition(new vscode.Position(currentBlock[1].line, Infinity))
            range = new vscode.Range(blockStartPos, lineEndPos)
            nextBlock = editor.document.validatePosition(new vscode.Position(currentBlock[2].line, currentBlock[2].character))
        } else {
            range = new vscode.Range(selection.start, selection.end)
        }
        const text = editor.document.getText(range)
        // only move the cursor for a single, unchanged, empty selection
        if (shouldMove && nextBlock && selection.isEmpty && editor.selections.length === 1 && editor.selection === selection) {
            validateMoveAndReveal(editor, nextBlock, nextBlock)
        }
        if (range.isEmpty) {
            return
        }
        // flash a highlight over the evaluated range for 200ms
        const tempDecoration = vscode.window.createTextEditorDecorationType({
            backgroundColor: new vscode.ThemeColor('editor.hoverHighlightBackground'),
            isWholeLine: true
        })
        editor.setDecorations(tempDecoration, [range])
        setTimeout(() => {
            editor.setDecorations(tempDecoration, [])
        }, 200)
        await evaluate(editor, range, text, module)
    }
}
// Sends `text` to the Julia process for evaluation and, depending on the
// `julia.execution.resultType` setting, shows the result inline (via the
// results provider), in the REPL, or both.
async function evaluate(editor: vscode.TextEditor, range: vscode.Range, text: string, module: string) {
    telemetry.traceEvent('command-evaluate')
    const section = vscode.workspace.getConfiguration('julia')
    const resultType: string = section.get('execution.resultType')
    const codeInREPL: boolean = section.get('execution.codeInREPL')
    let r: results.Result = null
    if (resultType !== 'REPL') {
        // inline placeholder shown while evaluation is running
        r = results.addResult(editor, range, ' ⟳ ', '')
    }
    const result: ReturnResult = await g_connection.sendRequest(
        requestTypeReplRunCode,
        {
            filename: editor.document.fileName,
            line: range.start.line,
            column: range.start.character,
            code: text,
            mod: module,
            showCodeInREPL: codeInREPL,
            showResultInREPL: resultType === 'REPL' || resultType === 'both',
            showErrorInREPL: resultType.indexOf('error') > -1,
            softscope: true
        }
    )
    if (resultType !== 'REPL') {
        if (result.stackframe) {
            results.clearStackTrace()
            results.setStackTrace(r, result.all, result.stackframe)
        }
        r.setContent(results.resultContent(' ' + result.inline + ' ', result.all, Boolean(result.stackframe)))
    }
}
// Sends `text` to the REPL terminal as if typed. On Windows, or when
// `individualLine` is set, lines are sent as plain text; otherwise the text
// is wrapped in bracketed-paste escape sequences so the REPL treats it as a
// single paste. Blank lines are stripped either way.
async function executeCodeCopyPaste(text: string, individualLine: boolean) {
    if (!text.endsWith('\n')) {
        text = text + '\n'
    }
    await startREPL(true, true)
    const nonEmptyLines = text.split(/\r?\n/).filter(line => line !== '')
    const payload = nonEmptyLines.join('\n')
    if (individualLine || process.platform === 'win32') {
        g_terminal.sendText(payload + '\n', false)
    } else {
        g_terminal.sendText('\u001B[200~' + payload + '\n' + '\u001B[201~', false)
    }
}
// Copy-paste style execution: sends the current selection (or the current
// line when nothing is selected) to the terminal. Does not require the LS.
function executeSelectionCopyPaste() {
    telemetry.traceEvent('command-executeSelectionCopyPaste')
    const editor = vscode.window.activeTextEditor
    if (!editor) {
        return
    }
    const selection = editor.selection
    const text = selection.isEmpty ? editor.document.lineAt(selection.start.line).text : editor.document.getText(selection)
    // If no text was selected, try to move the cursor to the end of the next line
    if (selection.isEmpty) {
        for (let line = selection.start.line + 1; line < editor.document.lineCount; line++) {
            // skip blank lines so the cursor lands on the next statement
            if (!editor.document.lineAt(line).isEmptyOrWhitespace) {
                const newPos = selection.active.with(line, editor.document.lineAt(line).range.end.character)
                const newSel = new vscode.Selection(newPos, newPos)
                editor.selection = newSel
                break
            }
        }
    }
    executeCodeCopyPaste(text, selection.isEmpty)
}
// Public entry point for running Julia code in the REPL from other parts of
// the extension. Starts the REPL if necessary and forwards the request.
export async function executeInREPL(code: string, { filename = 'code', line = 0, column = 0, mod = 'Main', showCodeInREPL = true, showResultInREPL = true, showErrorInREPL = false, softscope = true }): Promise<ReturnResult> {
    await startREPL(true)
    const request = {
        filename,
        line,
        column,
        code,
        mod,
        showCodeInREPL,
        showResultInREPL,
        showErrorInREPL,
        softscope
    }
    return await g_connection.sendRequest(requestTypeReplRunCode, request)
}
// Ring buffer (size 5) of timestamps of recent interrupt requests.
const interrupts = []
let last_interrupt_index = -1
// Requests an interrupt of running Julia code. Always sends the cooperative
// in-band interrupt; additionally escalates to a SIGINT when 3 or more
// interrupts were requested within the last second (the user is mashing).
function interrupt() {
    telemetry.traceEvent('command-interrupt')
    // always send out internal interrupt
    softInterrupt()
    // but we'll try sending a SIGINT if more than 3 interrupts were sent in the last second
    last_interrupt_index = (last_interrupt_index + 1) % 5
    interrupts[last_interrupt_index] = new Date()
    const now = new Date()
    if (interrupts.filter(x => (now.getTime() - x.getTime()) < 1000).length >= 3) {
        signalInterrupt()
    }
}
// Sends the cooperative interrupt notification over the RPC connection;
// failures (e.g. no live connection) are only logged.
function softInterrupt() {
    try {
        g_connection.sendNotification('repl/interrupt')
    } catch (err) {
        console.warn(err)
    }
}
// Delivers an actual SIGINT to the Julia process. Not available on Windows.
function signalInterrupt() {
    telemetry.traceEvent('command-signal-interrupt')
    try {
        if (process.platform === 'win32') {
            console.warn('Signal interrupts are not supported on Windows.')
        } else {
            g_terminal.processId.then(pid => process.kill(pid, 'SIGINT'))
        }
    } catch (err) {
        console.warn(err)
    }
}
// code execution end
// Changes the Julia REPL's working directory to the directory of `uri` (or
// of the active editor's file when no uri is given).
async function cdToHere(uri: vscode.Uri) {
    telemetry.traceEvent('command-cdHere')
    const targetDir = await getDirUriFsPath(uri)
    await startREPL(true, false)
    if (!targetDir) {
        return
    }
    try {
        g_connection.sendNotification('repl/cd', { uri: targetDir })
    } catch (err) {
        console.log(err)
    }
}
// Activates the Julia project environment at (or containing) `uri`.
async function activateHere(uri: vscode.Uri) {
    telemetry.traceEvent('command-activateThisEnvironment')
    const uriPath = await getDirUriFsPath(uri)
    // await the async activatePath so a rejection surfaces to this command's
    // caller instead of becoming an unhandled promise rejection
    await activatePath(uriPath)
}
// Tells the running REPL to activate the project at `path` and keeps the
// extension's environment selection in sync.
async function activatePath(path: string) {
    await startREPL(true, false)
    if (!path) {
        return
    }
    try {
        g_connection.sendNotification('repl/activateProject', { uri: path })
        switchEnvToPath(path, true)
    } catch (err) {
        console.log(err)
    }
}
// Walks upward from `uri` looking for a Project.toml and activates the first
// project found; warns when none exists.
async function activateFromDir(uri: vscode.Uri) {
    const uriPath = await getDirUriFsPath(uri)
    if (!uriPath) {
        return
    }
    try {
        const target = await searchUpFile('Project.toml', uriPath)
        if (!target) {
            vscode.window.showWarningMessage(`No project file found for ${uriPath}`)
            return
        }
        // await the async activatePath so its failures are caught by this
        // try/catch instead of being dropped as a floating promise
        await activatePath(path.dirname(target))
    } catch (err) {
        console.log(err)
    }
}
// Searches upward from `from` for a file named `target`; resolves with its
// full path, or undefined once the filesystem root is reached.
async function searchUpFile(target: string, from: string): Promise<string> {
    const parentDir = path.dirname(from)
    if (parentDir === from) {
        // at the root, dirname(root) === root — stop to avoid infinite recursion
        return undefined
    }
    const candidate = path.join(from, target)
    if (await fs.exists(candidate)) {
        return candidate
    }
    return searchUpFile(target, parentDir)
}
// Resolves `uri` (or, when absent, the active editor's document URI) to a
// directory on disk: a file yields its parent directory, a directory yields
// itself, anything else yields undefined.
async function getDirUriFsPath(uri: vscode.Uri | undefined) {
    if (!uri) {
        const ed = vscode.window.activeTextEditor
        if (ed && ed.document && ed.document.uri) {
            uri = ed.document.uri
        }
    }
    if (!uri || !uri.fsPath) {
        return undefined
    }
    const uriPath = uri.fsPath
    const stat = await fs.stat(uriPath)
    if (stat.isFile()) {
        return path.dirname(uriPath)
    } else if (stat.isDirectory()) {
        return uriPath
    } else {
        return undefined
    }
}
// Opens the file a terminal link points at. Paths starting with '.' are
// resolved against the Julia base source tree; '~' expands to the user's
// home directory. A nonexistent file is silently ignored (debug log only).
async function linkHandler(link: any) {
    let { file, line } = link.data
    if (file.startsWith('.')) {
        // Base file
        const exepath = await g_juliaExecutablesFeature.getActiveJuliaExecutableAsync()
        file = path.join(await exepath.getBaseRootFolderPathAsync(), file)
    } else if (file.startsWith('~')) {
        file = path.join(homedir(), file.slice(1))
    }
    try {
        await openFile(file, line)
    } catch (err) {
        console.debug('This file does not exist.')
    }
}
// Turns `@ Module path/to/file.jl:123` stack-frame fragments in terminal
// output into clickable links. Lines referring to REPL history entries
// (`REPL[n]`) are never linked.
function linkProvider(context: vscode.TerminalLinkContext, token: vscode.CancellationToken) {
    const line = context.line
    // Can't link to the REPL
    if (/\bREPL\[\d+\]/.test(line)) {
        return []
    }
    const match = line.match(/(@\s+(?:[^\s/\\]+\s+)?)(.+?):(\d+)/)
    if (!match) {
        return []
    }
    const prefixLength = match[1].length
    return [
        {
            startIndex: match.index + prefixLength,
            length: match[0].length - prefixLength,
            data: {
                file: match[2],
                line: match[3]
            }
        }
    ]
}
// Asks the running REPL process to start a debug adapter listening on the
// given pipe; starts the REPL first if needed.
export async function replStartDebugger(pipename: string) {
    await startREPL(true)
    g_connection.sendNotification(notifyTypeReplStartDebugger, { debugPipename: pipename })
}
export function activate(context: vscode.ExtensionContext, compiledProvider, juliaExecutablesFeature: JuliaExecutablesFeature) {
g_context = context
g_juliaExecutablesFeature = juliaExecutablesFeature
g_compiledProvider = compiledProvider
context.subscriptions.push(
// listeners
onSetLanguageClient(languageClient => {
g_languageClient = languageClient
}),
onInit(connection => {
connection.onNotification(notifyTypeDisplay, display)
connection.onNotification(notifyTypeDebuggerRun, debuggerRun)
connection.onNotification(notifyTypeDebuggerEnter, debuggerEnter)
connection.onNotification(notifyTypeReplStartEval, () => g_onStartEval.fire(null))
connection.onNotification(notifyTypeReplFinishEval, () => g_onFinishEval.fire(null))
connection.onNotification(notifyTypeShowProfilerResult, showProfileResult)
connection.onNotification(notifyTypeShowProfilerResultFile, showProfileResultFile)
connection.onNotification(notifyTypeProgress, updateProgress)
setContext('isJuliaEvaluating', false)
setContext('hasJuliaREPL', true)
}),
onExit(() => {
results.removeAll()
clearDiagnostics()
setContext('isJuliaEvaluating', false)
setContext('hasJuliaREPL', false)
}),
onStartEval(() => {
updateProgress({
name: 'Evaluating…',
id: { value: -1 },
fraction: -1,
done: false
})
setContext('isJuliaEvaluating', true)
}),
onFinishEval(() => {
clearProgress()
setContext('isJuliaEvaluating', false)
}),
vscode.workspace.onDidChangeConfiguration(event => {
if (event.affectsConfiguration('julia.usePlotPane')) {
try {
g_connection.sendNotification('repl/togglePlotPane', { enable: vscode.workspace.getConfiguration('julia').get('usePlotPane') })
} catch (err) {
console.warn(err)
}
} else if (event.affectsConfiguration('julia.useProgressFrontend')) {
try {
g_connection.sendNotification('repl/toggleProgress', { enable: vscode.workspace.getConfiguration('julia').get('useProgressFrontend') })
} catch (err) {
console.warn(err)
}
} else if (event.affectsConfiguration('julia.showRuntimeDiagnostics')) {
try {
g_connection.sendNotification('repl/toggleDiagnostics', { enable: vscode.workspace.getConfiguration('julia').get('showRuntimeDiagnostics') })
} catch (err) {
console.warn(err)
}
}
}),
vscode.window.onDidChangeActiveTerminal(terminal => {
if (terminal === g_terminal) {
setContext('isJuliaREPL', true)
} else {
setContext('isJuliaREPL', false)
}
}),
vscode.window.onDidCloseTerminal(terminal => {
if (terminal === g_terminal) {
g_terminal = null
}
}),
// link handler
vscode.window.registerTerminalLinkProvider({
provideTerminalLinks: linkProvider,
handleTerminalLink: linkHandler
}),
// commands
registerCommand('language-julia.startREPL', startREPLCommand),
registerCommand('language-julia.connectREPL', connectREPL),
registerCommand('language-julia.stopREPL', killREPL),
registerCommand('language-julia.selectBlock', selectJuliaBlock),
registerCommand('language-julia.executeCodeBlockOrSelection', evaluateBlockOrSelection),
registerCommand('language-julia.executeCodeBlockOrSelectionAndMove', () => evaluateBlockOrSelection(true)),
registerCommand('language-julia.executeCell', executeCell),
registerCommand('language-julia.executeCellAndMove', () => executeCell(true)),
registerCommand('language-julia.moveCellUp', moveCellUp),
registerCommand('language-julia.moveCellDown', moveCellDown),
registerCommand('language-julia.executeActiveFile', () => executeFile()),
registerCommand('language-julia.executeFile', executeFile),
registerCommand('language-julia.interrupt', interrupt),
registerCommand('language-julia.executeJuliaCodeInREPL', executeSelectionCopyPaste), // copy-paste selection into REPL. doesn't require LS to be started
registerCommand('language-julia.cdHere', cdToHere),
registerCommand('language-julia.activateHere', activateHere),
registerCommand('language-julia.activateFromDir', activateFromDir),
registerCommand('language-julia.clearRuntimeDiagnostics', clearDiagnostics),
registerCommand('language-julia.clearRuntimeDiagnosticsByProvider', clearDiagnosticsByProvider),
)
const terminalConfig = vscode.workspace.getConfiguration('terminal.integrated')
const shellSkipCommands: Array<String> = terminalConfig.get('commandsToSkipShell')
if (shellSkipCommands.indexOf('language-julia.interrupt') === -1) {
shellSkipCommands.push('language-julia.interrupt')
terminalConfig.update('commandsToSkipShell', shellSkipCommands, true)
}
results.activate(context)
plots.activate(context)
modules.activate(context)
} | the_stack |
import { Schema } from '@wordpress/api-fetch';
import { EditorSettings } from '@wordpress/block-editor';
import { BlockInstance } from '@wordpress/blocks';
import { Autosave } from '@wordpress/core-data';
export {
canInsertBlockType,
getAdjacentBlockClientId,
getBlock,
getBlockAttributes,
getBlockCount,
getBlockHierarchyRootClientId,
getBlockIndex,
getBlockInsertionPoint,
getBlockListSettings,
getBlockMode,
getBlockName,
getBlockOrder,
getBlockRootClientId,
getBlockSelectionEnd,
getBlockSelectionStart,
getBlocks,
getBlocksByClientId,
getClientIdsOfDescendants,
getClientIdsWithDescendants,
getFirstMultiSelectedBlockClientId,
getGlobalBlockCount,
getInserterItems,
getLastMultiSelectedBlockClientId,
getMultiSelectedBlockClientIds,
getMultiSelectedBlocks,
getMultiSelectedBlocksEndClientId,
getMultiSelectedBlocksStartClientId,
getNextBlockClientId,
getPreviousBlockClientId,
getSelectedBlock,
getSelectedBlockClientId,
getSelectedBlockCount,
getSelectedBlocksInitialCaretPosition,
getTemplate,
getTemplateLock,
hasInserterItems,
hasMultiSelection,
hasSelectedBlock,
hasSelectedInnerBlock,
isAncestorMultiSelected,
isBlockInsertionPointVisible,
isBlockMultiSelected,
isBlockSelected,
isBlockValid,
isBlockWithinSelection,
isCaretWithinFormattedText,
isFirstMultiSelectedBlock,
isMultiSelecting,
isSelectionEnabled,
isTyping,
isValidTemplate,
} from '@wordpress/block-editor/store/selectors';
/**
* Returns whether or not the user has the unfiltered_html capability.
*
* @returns Whether the user can or can't post unfiltered HTML.
*/
export function canUserUseUnfilteredHTML(): boolean;
/**
* Returns `true` if a previous post save was attempted but failed, or `false` otherwise.
*
* @returns Whether the post save failed.
*/
export function didPostSaveRequestFail(): boolean;
/**
* Returns `true` if a previous post save was attempted successfully, or `false` otherwise.
*
* @returns Whether the post was saved successfully.
*/
export function didPostSaveRequestSucceed(): boolean;
/**
* Returns the active post lock.
*/
export function getActivePostLock(): string | undefined;
/**
* Returns the current autosave, or an empty object if one is not set (i.e. if the post has yet to
* be autosaved, or has been saved or published since the last autosave).
*
* @deprecated since 5.6. Callers should use the `getAutosave( postType, postId, userId )`
* selector from the '@wordpress/core-data' package.
*/
export function getAutosave(): Autosave | {};
/**
* Returns an attribute value of the current autosave revision for a post, or an empty object if
* there is no autosave for the post.
*
* @deprecated since 5.6. Callers should use the `getAutosave( postType, postId, userId )` selector
* from the '@wordpress/core-data' package and access properties on the returned
* autosave object using getPostRawValue.
*
* @param attributeName - Autosave attribute name.
*/
export function getAutosaveAttribute<T extends keyof Autosave>(attributeName: T): Autosave[T] | {};
/**
* Returns a set of blocks which are to be used in consideration of the post's generated save
* content.
*
* @returns Filtered set of blocks for save.
*/
export function getBlocksForSerialization(): BlockInstance[];
/**
* Returns the post currently being edited in its last known saved state, not including unsaved
* edits. Returns an object containing relevant default post values if the post has not yet been
* saved.
*/
export function getCurrentPost(): Schema.Decontextualize<Schema.PostOrPage<'edit'>>;
/**
* Returns an attribute value of the saved post.
*
* @param attributeName - Post attribute name.
*/
export function getCurrentPostAttribute<T extends keyof Schema.PostOrPage<'edit'>>(
attributeName: T
): Schema.Decontextualize<Schema.PostOrPage<'edit'>>[T] | undefined;
/**
* Returns the ID of the post currently being edited.
*/
export function getCurrentPostId(): number;
/**
* Returns the last revision ID of the post currently being edited, or `null` if the post has no
* revisions.
*/
export function getCurrentPostLastRevisionId(): number | null;
/**
* Returns the number of revisions of the post currently being edited.
*/
export function getCurrentPostRevisionsCount(): number;
/**
* Returns the post type of the post currently being edited.
*/
export function getCurrentPostType(): string;
/**
* Returns a single attribute of the post being edited, preferring the unsaved edit if one exists,
* but falling back to the attribute for the last known saved state of the post.
*
* @param attributeName - Post attribute name.
*/
export function getEditedPostAttribute<T extends keyof Schema.PostOrPage<'edit'>>(
attributeName: T
): Schema.Decontextualize<Schema.PostOrPage<'edit'>>[T] | undefined;
/**
* Returns the content of the post being edited, preferring raw string edit before falling back to
* serialization of block state.
*/
export function getEditedPostContent(): string;
/**
* Returns the post preview link.
*/
export function getEditedPostPreviewLink(): string | null;
/**
* Returns the current visibility of the post being edited, preferring the unsaved value if
* different than the saved post. The return value is one of `"private"`, `"password"`, or `"public"`.
*/
export function getEditedPostVisibility(): 'password' | 'private' | 'public';
/**
* Return the current block list.
*/
export function getEditorBlocks(): BlockInstance[];
/**
* Returns the post editor settings.
*/
export function getEditorSettings(): EditorSettings;
/**
* Returns the permalink for the post.
*
* @returns The permalink, or `null` if the post is not viewable.
*/
export function getPermalink(): string | null;
/**
* Returns the permalink for a post, split into its three parts: the prefix, the postName, and the
* suffix.
*
* @returns An object containing the prefix, postName, and suffix for the permalink, or `null` if
* the post is not viewable.
*/
export function getPermalinkParts(): { postName: string; prefix: string; suffix?: string } | null;
/**
* Returns any post values which have been changed in the editor but not yet been saved.
*
* @returns Object of key value pairs comprising unsaved edits.
*/
export function getPostEdits(): Partial<Schema.Decontextualize<Schema.PostOrPage<'edit'>>>;
/**
* Returns details about the post lock user.
*/
export function getPostLockUser(): Schema.User | undefined | null;
/**
* Returns a new reference when edited values have changed. This is useful in inferring where an
* edit has been made between states by comparison of the return values using strict equality.
*
* @example
* ```js
* const hasEditOccurred = (
* getReferenceByDistinctEdits( beforeState ) !==
* getReferenceByDistinctEdits( afterState )
* );
* ```
*
* @returns A value whose reference will change only when an edit occurs.
*/
export function getReferenceByDistinctEdits(): [];
/**
* Returns state object prior to a specified optimist transaction ID, or `null` if the transaction
* corresponding to the given ID cannot be found.
*
* @param transactionId - Optimist transaction ID.
*
* @returns Global application state prior to transaction.
*/
export function getStateBeforeOptimisticTransaction(transactionId: object): any;
/**
* Returns a suggested post format for the current post, inferred only if there is a single block
* within the post and it is of a type known to match a default post format. Returns `null` if the
* format cannot be determined.
*/
export function getSuggestedPostFormat(): string | null;
/**
* Returns the true if there is an existing autosave, otherwise false.
*
* @deprecated since 5.6. Callers should use the `getAutosave( postType, postId, userId )` selector
* from the '@wordpress/core-data' package and check for a truthy value.
*/
export function hasAutosave(): boolean;
/**
* Returns `true` if content includes unsaved changes, or `false` otherwise.
*/
export function hasChangedContent(): boolean;
/**
* Returns `true` if any future editor history snapshots exist, or `false` otherwise.
*/
export function hasEditorRedo(): boolean;
/**
* Returns `true` if any past editor history snapshots exist, or `false` otherwise.
*/
export function hasEditorUndo(): boolean;
/**
* Returns `true` if an optimistic transaction is pending commit, for which the before state
* satisfies the given predicate function.
*
* @param predicate - Function given state, returning `true` if match.
*/
export function inSomeHistory(predicate: (state: Record<string, any>) => boolean): boolean;
/**
* Returns `true` if the post is autosaving, or `false` otherwise.
*/
export function isAutosavingPost(): boolean;
/**
* Returns `true` if there are no unsaved values for the current edit session and if the currently
* edited post is new (has never been saved before).
*/
export function isCleanNewPost(): boolean;
/**
* Returns `true` if post is pending review.
*/
export function isCurrentPostPending(): boolean;
/**
* Return `true` if the current post has already been published.
*/
export function isCurrentPostPublished(): boolean;
/**
* Returns `true` if post is already scheduled.
*/
export function isCurrentPostScheduled(): boolean;
/**
* Returns `true` if the post can be autosaved, or `false` otherwise.
*/
export function isEditedPostAutosaveable(): boolean;
/**
* Return `true` if the post being edited is being scheduled. Preferring the unsaved status values.
*/
export function isEditedPostBeingScheduled(): boolean;
/**
* Returns whether the current post should be considered to have a "floating" date (i.e. that it
* would publish "Immediately" rather than at a set time).
*
* @remarks
* Unlike in the PHP backend, the REST API returns a full date string for posts where the
* 0000-00-00T00:00:00 placeholder is present in the database. To infer that a post is set to
* publish "Immediately" we check whether the date and modified date are the same.
*/
export function isEditedPostDateFloating(): boolean;
/**
* Returns `true` if there are unsaved values for the current edit session, or `false` if the
* editing state matches the saved or new post.
*/
export function isEditedPostDirty(): boolean;
/**
* Returns `true` if the edited post has content. A post has content if it has at least one saveable
* block or otherwise has a non-empty content property assigned.
*/
export function isEditedPostEmpty(): boolean;
/**
* Returns `true` if the currently edited post is yet to be saved, or `false` if the post has been
* saved.
*/
export function isEditedPostNew(): boolean;
/**
* Return `true` if the post being edited can be published.
*/
export function isEditedPostPublishable(): boolean;
/**
* Returns `true` if the post can be saved, or `false` otherwise. A post must contain a title, an
* excerpt, or non-empty content to be valid for save.
*/
export function isEditedPostSaveable(): boolean;
/**
* Returns whether the permalink is editable or not.
*/
export function isPermalinkEditable(): boolean;
/**
* Returns whether the edition of the post has been taken over.
*/
export function isPostLockTakeover(): boolean;
/**
* Returns whether the post is locked.
*/
export function isPostLocked(): boolean;
/**
* Returns whether post saving is locked.
*/
export function isPostSavingLocked(): boolean;
/**
* Returns `true` if the post is being previewed, or `false` otherwise.
*/
export function isPreviewingPost(): boolean;
/**
* Returns whether the pre-publish panel should be shown or skipped when the user clicks the
* "publish" button.
*/
export function isPublishSidebarEnabled(): boolean;
/**
* Returns `true` if the post is being published, or `false` otherwise.
*/
export function isPublishingPost(): boolean;
/**
* Returns `true` if the post is currently being saved, or `false` otherwise.
*/
export function isSavingPost(): boolean;
import * as pulumi from "@pulumi/pulumi";
import { input as inputs, output as outputs } from "../types";
import * as utilities from "../utilities";
/**
* A `KeyRingImportJob` can be used to create `CryptoKeys` and `CryptoKeyVersions` using pre-existing
* key material, generated outside of Cloud KMS. A `KeyRingImportJob` expires 3 days after it is created.
* Once expired, Cloud KMS will no longer be able to import or unwrap any key material that
* was wrapped with the `KeyRingImportJob`'s public key.
*
* > **Note:** KeyRingImportJobs cannot be deleted from Google Cloud Platform.
* Destroying a provider-managed KeyRingImportJob will remove it from state but
* *will not delete the resource from the project.*
*
* To get more information about KeyRingImportJob, see:
*
* * [API documentation](https://cloud.google.com/kms/docs/reference/rest/v1/projects.locations.keyRings.importJobs)
* * How-to Guides
* * [Importing a key](https://cloud.google.com/kms/docs/importing-a-key)
*
* ## Example Usage
*
* ## Import
*
* KeyRingImportJob can be imported using any of these accepted formats
*
* ```sh
* $ pulumi import gcp:kms/keyRingImportJob:KeyRingImportJob default {{name}}
* ```
*/
export class KeyRingImportJob extends pulumi.CustomResource {
/**
* Get an existing KeyRingImportJob resource's state with the given name, ID, and optional extra
* properties used to qualify the lookup.
*
* @param name The _unique_ name of the resulting resource.
* @param id The _unique_ provider ID of the resource to lookup.
* @param state Any extra arguments used during the lookup.
* @param opts Optional settings to control the behavior of the CustomResource.
*/
public static get(name: string, id: pulumi.Input<pulumi.ID>, state?: KeyRingImportJobState, opts?: pulumi.CustomResourceOptions): KeyRingImportJob {
return new KeyRingImportJob(name, <any>state, { ...opts, id: id });
}
/** @internal */
public static readonly __pulumiType = 'gcp:kms/keyRingImportJob:KeyRingImportJob';
/**
* Returns true if the given object is an instance of KeyRingImportJob. This is designed to work even
* when multiple copies of the Pulumi SDK have been loaded into the same process.
*/
public static isInstance(obj: any): obj is KeyRingImportJob {
if (obj === undefined || obj === null) {
return false;
}
return obj['__pulumiType'] === KeyRingImportJob.__pulumiType;
}
/**
* Statement that was generated and signed by the key creator (for example, an HSM) at key creation time. Use this
* statement to verify attributes of the key as stored on the HSM, independently of Google. Only present if the chosen
* ImportMethod is one with a protection level of HSM.
*/
public /*out*/ readonly attestations!: pulumi.Output<outputs.kms.KeyRingImportJobAttestation[]>;
/**
* The time at which this resource is scheduled for expiration and can no longer be used. This is in RFC3339 text format.
*/
public /*out*/ readonly expireTime!: pulumi.Output<string>;
/**
* It must be unique within a KeyRing and match the regular expression [a-zA-Z0-9_-]{1,63}
*/
public readonly importJobId!: pulumi.Output<string>;
/**
* The wrapping method to be used for incoming key material.
* Possible values are `RSA_OAEP_3072_SHA1_AES_256` and `RSA_OAEP_4096_SHA1_AES_256`.
*/
public readonly importMethod!: pulumi.Output<string>;
/**
* The KeyRing that this import job belongs to.
* Format: `'projects/{{project}}/locations/{{location}}/keyRings/{{keyRing}}'`.
*/
public readonly keyRing!: pulumi.Output<string>;
/**
* The resource name for this ImportJob in the format projects/*/locations/*/keyRings/*/importJobs/*.
*/
public /*out*/ readonly name!: pulumi.Output<string>;
/**
* The protection level of the ImportJob. This must match the protectionLevel of the
* versionTemplate on the CryptoKey you attempt to import into.
* Possible values are `SOFTWARE`, `HSM`, and `EXTERNAL`.
*/
public readonly protectionLevel!: pulumi.Output<string>;
/**
* The public key with which to wrap key material prior to import. Only returned if state is 'ACTIVE'.
*/
public /*out*/ readonly publicKeys!: pulumi.Output<outputs.kms.KeyRingImportJobPublicKey[]>;
/**
* The current state of the ImportJob, indicating if it can be used.
*/
public /*out*/ readonly state!: pulumi.Output<string>;
/**
* Create a KeyRingImportJob resource with the given unique name, arguments, and options.
*
* @param name The _unique_ name of the resource.
* @param args The arguments to use to populate this resource's properties.
* @param opts A bag of options that control this resource's behavior.
*/
constructor(name: string, args: KeyRingImportJobArgs, opts?: pulumi.CustomResourceOptions)
constructor(name: string, argsOrState?: KeyRingImportJobArgs | KeyRingImportJobState, opts?: pulumi.CustomResourceOptions) {
let inputs: pulumi.Inputs = {};
opts = opts || {};
if (opts.id) {
const state = argsOrState as KeyRingImportJobState | undefined;
inputs["attestations"] = state ? state.attestations : undefined;
inputs["expireTime"] = state ? state.expireTime : undefined;
inputs["importJobId"] = state ? state.importJobId : undefined;
inputs["importMethod"] = state ? state.importMethod : undefined;
inputs["keyRing"] = state ? state.keyRing : undefined;
inputs["name"] = state ? state.name : undefined;
inputs["protectionLevel"] = state ? state.protectionLevel : undefined;
inputs["publicKeys"] = state ? state.publicKeys : undefined;
inputs["state"] = state ? state.state : undefined;
} else {
const args = argsOrState as KeyRingImportJobArgs | undefined;
if ((!args || args.importJobId === undefined) && !opts.urn) {
throw new Error("Missing required property 'importJobId'");
}
if ((!args || args.importMethod === undefined) && !opts.urn) {
throw new Error("Missing required property 'importMethod'");
}
if ((!args || args.keyRing === undefined) && !opts.urn) {
throw new Error("Missing required property 'keyRing'");
}
if ((!args || args.protectionLevel === undefined) && !opts.urn) {
throw new Error("Missing required property 'protectionLevel'");
}
inputs["importJobId"] = args ? args.importJobId : undefined;
inputs["importMethod"] = args ? args.importMethod : undefined;
inputs["keyRing"] = args ? args.keyRing : undefined;
inputs["protectionLevel"] = args ? args.protectionLevel : undefined;
inputs["attestations"] = undefined /*out*/;
inputs["expireTime"] = undefined /*out*/;
inputs["name"] = undefined /*out*/;
inputs["publicKeys"] = undefined /*out*/;
inputs["state"] = undefined /*out*/;
}
if (!opts.version) {
opts = pulumi.mergeOptions(opts, { version: utilities.getVersion()});
}
super(KeyRingImportJob.__pulumiType, name, inputs, opts);
}
}
/**
 * Input properties used for looking up and filtering KeyRingImportJob resources.
 *
 * Every property is optional: values supplied here are only used to qualify the
 * lookup performed by `KeyRingImportJob.get()`.
 */
export interface KeyRingImportJobState {
    /**
     * Statement that was generated and signed by the key creator (for example, an HSM) at key creation time. Use this
     * statement to verify attributes of the key as stored on the HSM, independently of Google. Only present if the chosen
     * ImportMethod is one with a protection level of HSM.
     */
    attestations?: pulumi.Input<pulumi.Input<inputs.kms.KeyRingImportJobAttestation>[]>;
    /**
     * The time at which this resource is scheduled for expiration and can no longer be used. This is in RFC3339 text format.
     */
    expireTime?: pulumi.Input<string>;
    /**
     * It must be unique within a KeyRing and match the regular expression [a-zA-Z0-9_-]{1,63}
     */
    importJobId?: pulumi.Input<string>;
    /**
     * The wrapping method to be used for incoming key material.
     * Possible values are `RSA_OAEP_3072_SHA1_AES_256` and `RSA_OAEP_4096_SHA1_AES_256`.
     */
    importMethod?: pulumi.Input<string>;
    /**
     * The KeyRing that this import job belongs to.
     * Format: `'projects/{{project}}/locations/{{location}}/keyRings/{{keyRing}}'`.
     */
    keyRing?: pulumi.Input<string>;
    /**
     * The resource name for this ImportJob in the format projects/*&#47;locations/*&#47;keyRings/*&#47;importJobs/*.
     */
    name?: pulumi.Input<string>;
    /**
     * The protection level of the ImportJob. This must match the protectionLevel of the
     * versionTemplate on the CryptoKey you attempt to import into.
     * Possible values are `SOFTWARE`, `HSM`, and `EXTERNAL`.
     */
    protectionLevel?: pulumi.Input<string>;
    /**
     * The public key with which to wrap key material prior to import. Only returned if state is 'ACTIVE'.
     */
    publicKeys?: pulumi.Input<pulumi.Input<inputs.kms.KeyRingImportJobPublicKey>[]>;
    /**
     * The current state of the ImportJob, indicating if it can be used.
     */
    state?: pulumi.Input<string>;
}
/**
 * The set of arguments for constructing a KeyRingImportJob resource.
 *
 * All four properties are required; the KeyRingImportJob constructor throws if
 * any of them is missing.
 */
export interface KeyRingImportJobArgs {
    /**
     * It must be unique within a KeyRing and match the regular expression [a-zA-Z0-9_-]{1,63}
     */
    importJobId: pulumi.Input<string>;
    /**
     * The wrapping method to be used for incoming key material.
     * Possible values are `RSA_OAEP_3072_SHA1_AES_256` and `RSA_OAEP_4096_SHA1_AES_256`.
     */
    importMethod: pulumi.Input<string>;
    /**
     * The KeyRing that this import job belongs to.
     * Format: `'projects/{{project}}/locations/{{location}}/keyRings/{{keyRing}}'`.
     */
    keyRing: pulumi.Input<string>;
    /**
     * The protection level of the ImportJob. This must match the protectionLevel of the
     * versionTemplate on the CryptoKey you attempt to import into.
     * Possible values are `SOFTWARE`, `HSM`, and `EXTERNAL`.
     */
    protectionLevel: pulumi.Input<string>;
}
import { Interpolation } from '@emotion/react';
// Props accepted by a component or intrinsic element, including `ref`, after
// applying JSX.LibraryManagedAttributes (defaultProps/propTypes resolution).
export type PropsOf<C extends keyof JSX.IntrinsicElements | React.JSXElementConstructor<any>> =
    JSX.LibraryManagedAttributes<C, React.ComponentPropsWithRef<C>>;
// Makes the keys listed in U optional on T; other keys are left as-is.
export type AddOptionalTo<T, U> = DistributiveOmit<T, U> & Partial<Pick<T, Extract<keyof T, U>>>;
// Omit that distributes over each member of a union type T.
type DistributiveOmit<T, U> = T extends any ? Pick<T, Exclude<keyof T, U>> : never;
// The sub-object of T restricted to the keys that also exist in U.
type Overwrapped<T, U> = Pick<T, Extract<keyof T, keyof U>>;
// Shorthand for the JSX intrinsic-elements map.
type JSXInEl = JSX.IntrinsicElements;
// Keys of React's class attributes (e.g. `ref`) that must never be style props.
type ReactClassPropKeys = keyof React.ClassAttributes<any>;
// Adds a `theme` prop to P: reuses P's own declared theme type (minus
// `undefined`) when present, otherwise falls back to T.
type WithTheme<P, T> = P extends { theme: infer Theme }
    ? P & { theme: Exclude<Theme, undefined> }
    : P & { theme: T };
// Options accepted by the styled(tag, options) factory.
interface StyledOptions {
    // Debug label (presumably incorporated into generated class names — see Emotion docs).
    label?: string;
    // Predicate deciding which props are forwarded to the underlying element.
    shouldForwardProp?(propName: string): boolean;
    target?: string;
}
// Marker shape that lets a styled component be used as a CSS selector.
interface ComponentSelector {
    __emotion_styles: any;
}
/**
 * A component produced by the styled factory: renders InnerProps plus the
 * StyleProps (minus `theme`, which becomes optional) and can itself be used
 * as a selector.
 */
export interface StyledComponent<InnerProps, StyleProps, Theme extends object>
    extends React.FC<InnerProps & DistributiveOmit<StyleProps, 'theme'> & { theme?: Theme }>,
        ComponentSelector {
    /**
     * @desc this method is type-unsafe
     */
    withComponent<NewTag extends keyof JSXInEl>(
        tag: NewTag
    ): StyledComponent<JSXInEl[NewTag], StyleProps, Theme>;
    withComponent<Tag extends React.ComponentType<any>>(
        tag: Tag
    ): StyledComponent<PropsOf<Tag>, StyleProps, Theme>;
}
// Call signatures (template-literal and object/function styles) when no theme
// type was fixed at createStyled time.
interface CreateStyledComponentBaseThemeless<InnerProps, ExtraProps> {
    <
        StyleProps extends DistributiveOmit<
            Overwrapped<InnerProps, StyleProps>,
            ReactClassPropKeys
        > = DistributiveOmit<InnerProps & ExtraProps, ReactClassPropKeys>,
        Theme extends object = object
    >(
        ...styles: Array<Interpolation<WithTheme<StyleProps, Theme>>>
    ): StyledComponent<InnerProps, StyleProps, Theme>;
    <
        StyleProps extends DistributiveOmit<
            Overwrapped<InnerProps, StyleProps>,
            ReactClassPropKeys
        > = DistributiveOmit<InnerProps & ExtraProps, ReactClassPropKeys>,
        Theme extends object = object
    >(
        template: TemplateStringsArray,
        ...styles: Array<Interpolation<WithTheme<StyleProps, Theme>>>
    ): StyledComponent<InnerProps, StyleProps, Theme>;
}
// Call signatures when the theme type is already fixed by createStyled.
interface CreateStyledComponentBaseThemed<
    InnerProps,
    ExtraProps,
    StyledInstanceTheme extends object
> {
    <
        StyleProps extends DistributiveOmit<
            Overwrapped<InnerProps, StyleProps>,
            ReactClassPropKeys
        > = DistributiveOmit<InnerProps & ExtraProps, ReactClassPropKeys>
    >(
        ...styles: Array<Interpolation<WithTheme<StyleProps, StyledInstanceTheme>>>
    ): StyledComponent<InnerProps, StyleProps, StyledInstanceTheme>;
    <
        StyleProps extends DistributiveOmit<
            Overwrapped<InnerProps, StyleProps>,
            ReactClassPropKeys
        > = DistributiveOmit<InnerProps & ExtraProps, ReactClassPropKeys>
    >(
        template: TemplateStringsArray,
        ...styles: Array<Interpolation<WithTheme<StyleProps, StyledInstanceTheme>>>
    ): StyledComponent<InnerProps, StyleProps, StyledInstanceTheme>;
}
type CreateStyledComponentBase<InnerProps, ExtraProps, StyledInstanceTheme extends object> =
    // this "reversed" condition checks if StyledInstanceTheme was already parametrized when using CreateStyled
    object extends StyledInstanceTheme
        ? CreateStyledComponentBaseThemeless<InnerProps, ExtraProps>
        : CreateStyledComponentBaseThemed<InnerProps, ExtraProps, StyledInstanceTheme>;
// Specialization for intrinsic elements (`styled.div`, `styled('div')`, ...).
type CreateStyledComponentIntrinsic<Tag extends keyof JSXInEl, ExtraProps, Theme extends object> =
    CreateStyledComponentBase<JSXInEl[Tag], ExtraProps, Theme>;
// Specialization for wrapping an existing React component.
type CreateStyledComponentExtrinsic<
    Tag extends React.ComponentType<any>,
    ExtraProps,
    Theme extends object
> = CreateStyledComponentBase<PropsOf<Tag>, ExtraProps, Theme>;
/**
 * Shorthand properties (`styled.div`, `styled.svg`, ...) for every supported
 * HTML and SVG tag, each producing a styled-component factory for that tag.
 */
interface StyledTags<Theme extends object> {
    /**
     * @desc
     * HTML tags
     */
    a: CreateStyledComponentIntrinsic<'a', {}, Theme>;
    abbr: CreateStyledComponentIntrinsic<'abbr', {}, Theme>;
    address: CreateStyledComponentIntrinsic<'address', {}, Theme>;
    area: CreateStyledComponentIntrinsic<'area', {}, Theme>;
    article: CreateStyledComponentIntrinsic<'article', {}, Theme>;
    aside: CreateStyledComponentIntrinsic<'aside', {}, Theme>;
    audio: CreateStyledComponentIntrinsic<'audio', {}, Theme>;
    b: CreateStyledComponentIntrinsic<'b', {}, Theme>;
    base: CreateStyledComponentIntrinsic<'base', {}, Theme>;
    bdi: CreateStyledComponentIntrinsic<'bdi', {}, Theme>;
    bdo: CreateStyledComponentIntrinsic<'bdo', {}, Theme>;
    big: CreateStyledComponentIntrinsic<'big', {}, Theme>;
    blockquote: CreateStyledComponentIntrinsic<'blockquote', {}, Theme>;
    body: CreateStyledComponentIntrinsic<'body', {}, Theme>;
    br: CreateStyledComponentIntrinsic<'br', {}, Theme>;
    button: CreateStyledComponentIntrinsic<'button', {}, Theme>;
    canvas: CreateStyledComponentIntrinsic<'canvas', {}, Theme>;
    caption: CreateStyledComponentIntrinsic<'caption', {}, Theme>;
    cite: CreateStyledComponentIntrinsic<'cite', {}, Theme>;
    code: CreateStyledComponentIntrinsic<'code', {}, Theme>;
    col: CreateStyledComponentIntrinsic<'col', {}, Theme>;
    colgroup: CreateStyledComponentIntrinsic<'colgroup', {}, Theme>;
    data: CreateStyledComponentIntrinsic<'data', {}, Theme>;
    datalist: CreateStyledComponentIntrinsic<'datalist', {}, Theme>;
    dd: CreateStyledComponentIntrinsic<'dd', {}, Theme>;
    del: CreateStyledComponentIntrinsic<'del', {}, Theme>;
    details: CreateStyledComponentIntrinsic<'details', {}, Theme>;
    dfn: CreateStyledComponentIntrinsic<'dfn', {}, Theme>;
    dialog: CreateStyledComponentIntrinsic<'dialog', {}, Theme>;
    div: CreateStyledComponentIntrinsic<'div', {}, Theme>;
    dl: CreateStyledComponentIntrinsic<'dl', {}, Theme>;
    dt: CreateStyledComponentIntrinsic<'dt', {}, Theme>;
    em: CreateStyledComponentIntrinsic<'em', {}, Theme>;
    embed: CreateStyledComponentIntrinsic<'embed', {}, Theme>;
    fieldset: CreateStyledComponentIntrinsic<'fieldset', {}, Theme>;
    figcaption: CreateStyledComponentIntrinsic<'figcaption', {}, Theme>;
    figure: CreateStyledComponentIntrinsic<'figure', {}, Theme>;
    footer: CreateStyledComponentIntrinsic<'footer', {}, Theme>;
    form: CreateStyledComponentIntrinsic<'form', {}, Theme>;
    h1: CreateStyledComponentIntrinsic<'h1', {}, Theme>;
    h2: CreateStyledComponentIntrinsic<'h2', {}, Theme>;
    h3: CreateStyledComponentIntrinsic<'h3', {}, Theme>;
    h4: CreateStyledComponentIntrinsic<'h4', {}, Theme>;
    h5: CreateStyledComponentIntrinsic<'h5', {}, Theme>;
    h6: CreateStyledComponentIntrinsic<'h6', {}, Theme>;
    head: CreateStyledComponentIntrinsic<'head', {}, Theme>;
    header: CreateStyledComponentIntrinsic<'header', {}, Theme>;
    hgroup: CreateStyledComponentIntrinsic<'hgroup', {}, Theme>;
    hr: CreateStyledComponentIntrinsic<'hr', {}, Theme>;
    html: CreateStyledComponentIntrinsic<'html', {}, Theme>;
    i: CreateStyledComponentIntrinsic<'i', {}, Theme>;
    iframe: CreateStyledComponentIntrinsic<'iframe', {}, Theme>;
    img: CreateStyledComponentIntrinsic<'img', {}, Theme>;
    input: CreateStyledComponentIntrinsic<'input', {}, Theme>;
    ins: CreateStyledComponentIntrinsic<'ins', {}, Theme>;
    kbd: CreateStyledComponentIntrinsic<'kbd', {}, Theme>;
    keygen: CreateStyledComponentIntrinsic<'keygen', {}, Theme>;
    label: CreateStyledComponentIntrinsic<'label', {}, Theme>;
    legend: CreateStyledComponentIntrinsic<'legend', {}, Theme>;
    li: CreateStyledComponentIntrinsic<'li', {}, Theme>;
    link: CreateStyledComponentIntrinsic<'link', {}, Theme>;
    main: CreateStyledComponentIntrinsic<'main', {}, Theme>;
    map: CreateStyledComponentIntrinsic<'map', {}, Theme>;
    mark: CreateStyledComponentIntrinsic<'mark', {}, Theme>;
    /**
     * @desc
     * marquee tag is not supported by @types/react
     */
    // 'marquee': CreateStyledComponentIntrinsic<'marquee', {}, Theme>;
    menu: CreateStyledComponentIntrinsic<'menu', {}, Theme>;
    menuitem: CreateStyledComponentIntrinsic<'menuitem', {}, Theme>;
    meta: CreateStyledComponentIntrinsic<'meta', {}, Theme>;
    meter: CreateStyledComponentIntrinsic<'meter', {}, Theme>;
    nav: CreateStyledComponentIntrinsic<'nav', {}, Theme>;
    noscript: CreateStyledComponentIntrinsic<'noscript', {}, Theme>;
    object: CreateStyledComponentIntrinsic<'object', {}, Theme>;
    ol: CreateStyledComponentIntrinsic<'ol', {}, Theme>;
    optgroup: CreateStyledComponentIntrinsic<'optgroup', {}, Theme>;
    option: CreateStyledComponentIntrinsic<'option', {}, Theme>;
    output: CreateStyledComponentIntrinsic<'output', {}, Theme>;
    p: CreateStyledComponentIntrinsic<'p', {}, Theme>;
    param: CreateStyledComponentIntrinsic<'param', {}, Theme>;
    picture: CreateStyledComponentIntrinsic<'picture', {}, Theme>;
    pre: CreateStyledComponentIntrinsic<'pre', {}, Theme>;
    progress: CreateStyledComponentIntrinsic<'progress', {}, Theme>;
    q: CreateStyledComponentIntrinsic<'q', {}, Theme>;
    rp: CreateStyledComponentIntrinsic<'rp', {}, Theme>;
    rt: CreateStyledComponentIntrinsic<'rt', {}, Theme>;
    ruby: CreateStyledComponentIntrinsic<'ruby', {}, Theme>;
    s: CreateStyledComponentIntrinsic<'s', {}, Theme>;
    samp: CreateStyledComponentIntrinsic<'samp', {}, Theme>;
    script: CreateStyledComponentIntrinsic<'script', {}, Theme>;
    section: CreateStyledComponentIntrinsic<'section', {}, Theme>;
    select: CreateStyledComponentIntrinsic<'select', {}, Theme>;
    small: CreateStyledComponentIntrinsic<'small', {}, Theme>;
    source: CreateStyledComponentIntrinsic<'source', {}, Theme>;
    span: CreateStyledComponentIntrinsic<'span', {}, Theme>;
    strong: CreateStyledComponentIntrinsic<'strong', {}, Theme>;
    style: CreateStyledComponentIntrinsic<'style', {}, Theme>;
    sub: CreateStyledComponentIntrinsic<'sub', {}, Theme>;
    summary: CreateStyledComponentIntrinsic<'summary', {}, Theme>;
    sup: CreateStyledComponentIntrinsic<'sup', {}, Theme>;
    table: CreateStyledComponentIntrinsic<'table', {}, Theme>;
    tbody: CreateStyledComponentIntrinsic<'tbody', {}, Theme>;
    td: CreateStyledComponentIntrinsic<'td', {}, Theme>;
    textarea: CreateStyledComponentIntrinsic<'textarea', {}, Theme>;
    tfoot: CreateStyledComponentIntrinsic<'tfoot', {}, Theme>;
    th: CreateStyledComponentIntrinsic<'th', {}, Theme>;
    thead: CreateStyledComponentIntrinsic<'thead', {}, Theme>;
    time: CreateStyledComponentIntrinsic<'time', {}, Theme>;
    title: CreateStyledComponentIntrinsic<'title', {}, Theme>;
    tr: CreateStyledComponentIntrinsic<'tr', {}, Theme>;
    track: CreateStyledComponentIntrinsic<'track', {}, Theme>;
    u: CreateStyledComponentIntrinsic<'u', {}, Theme>;
    ul: CreateStyledComponentIntrinsic<'ul', {}, Theme>;
    var: CreateStyledComponentIntrinsic<'var', {}, Theme>;
    video: CreateStyledComponentIntrinsic<'video', {}, Theme>;
    wbr: CreateStyledComponentIntrinsic<'wbr', {}, Theme>;
    /**
     * @desc
     * SVG tags
     */
    circle: CreateStyledComponentIntrinsic<'circle', {}, Theme>;
    clipPath: CreateStyledComponentIntrinsic<'clipPath', {}, Theme>;
    defs: CreateStyledComponentIntrinsic<'defs', {}, Theme>;
    ellipse: CreateStyledComponentIntrinsic<'ellipse', {}, Theme>;
    foreignObject: CreateStyledComponentIntrinsic<'foreignObject', {}, Theme>;
    g: CreateStyledComponentIntrinsic<'g', {}, Theme>;
    image: CreateStyledComponentIntrinsic<'image', {}, Theme>;
    line: CreateStyledComponentIntrinsic<'line', {}, Theme>;
    linearGradient: CreateStyledComponentIntrinsic<'linearGradient', {}, Theme>;
    mask: CreateStyledComponentIntrinsic<'mask', {}, Theme>;
    path: CreateStyledComponentIntrinsic<'path', {}, Theme>;
    pattern: CreateStyledComponentIntrinsic<'pattern', {}, Theme>;
    polygon: CreateStyledComponentIntrinsic<'polygon', {}, Theme>;
    polyline: CreateStyledComponentIntrinsic<'polyline', {}, Theme>;
    radialGradient: CreateStyledComponentIntrinsic<'radialGradient', {}, Theme>;
    rect: CreateStyledComponentIntrinsic<'rect', {}, Theme>;
    stop: CreateStyledComponentIntrinsic<'stop', {}, Theme>;
    svg: CreateStyledComponentIntrinsic<'svg', {}, Theme>;
    text: CreateStyledComponentIntrinsic<'text', {}, Theme>;
    tspan: CreateStyledComponentIntrinsic<'tspan', {}, Theme>;
}
/**
 * Call signatures of the `styled` factory itself: it accepts either an
 * existing React component or an intrinsic-element tag name, plus optional
 * StyledOptions, and returns the matching styled-component creator.
 */
interface BaseCreateStyled<Theme extends object = any> {
    <Tag extends React.ComponentType<any>, ExtraProps = {}>(
        tag: Tag,
        options?: StyledOptions
    ): CreateStyledComponentExtrinsic<Tag, ExtraProps, Theme>;
    <Tag extends keyof JSXInEl, ExtraProps = {}>(
        tag: Tag,
        options?: StyledOptions
    ): CreateStyledComponentIntrinsic<Tag, ExtraProps, Theme>;
}
export interface CreateStyled<Theme extends object = any>
extends BaseCreateStyled<Theme>,
    StyledTags<Theme> {}
import {
IAutorunOptions,
IReactionDisposer,
IReactionOptions,
IReactionPublic,
autorun,
observable,
reaction,
} from 'mobx';
import {OmitValueOfKey, OmitValueWithType} from 'tslang';
import {testPathPrefix, tolerate} from '../@utils';
import {IHistory} from '../history';
import {RouteBuilder} from '../route-builder';
import {Router} from '../router';
import {NextRouteMatch} from './next-route-match';
import {
GeneralParamDict,
GeneralSegmentDict,
RouteMatchShared,
RouteMatchSharedOptions,
} from './route-match-shared';
/////////////////////
// lifecycle hooks //
/////////////////////
// Extra data passed to update-phase lifecycle callbacks.
export interface RouteUpdateCallbackData {
  // Whether the update involves descendant route matches (presumably; confirm
  // against the router's update dispatch).
  descendants: boolean;
}
// before enter //
/**
 * Route before enter callback.
 * @return Return `true` or `undefined` to do nothing; return `false` to revert
 * this history change; return full path to redirect.
 */
export type RouteBeforeEnterCallback<
  TRouteMatch extends RouteMatch = RouteMatch,
> = (next: TRouteMatch['$next']) => Promise<boolean | void> | boolean | void;
// before update //
/**
 * Route before update callback.
 * @return Return `true` or `undefined` to do nothing; return `false` to revert
 * this history change; return full path to redirect.
 */
export type RouteBeforeUpdateCallback<
  TRouteMatch extends RouteMatch = RouteMatch,
> = (
  next: TRouteMatch['$next'],
  data: RouteUpdateCallbackData,
) => Promise<boolean | void> | boolean | void;
// Options accepted when registering a before-update hook.
export interface RouteBeforeUpdateOptions {
  // When true, the hook also fires for updates of descendant routes.
  traceDescendants: boolean;
}
// A registered before-update hook: callback plus its registration options.
export interface RouteBeforeUpdateEntry<
  TRouteMatch extends RouteMatch = RouteMatch,
> {
  callback: RouteBeforeUpdateCallback<TRouteMatch>;
  options?: RouteBeforeUpdateOptions;
}
// before leave //
/**
 * Route before leave callback.
 * @return Return `true` or `undefined` to do nothing; return `false` to revert
 * this history change.
 */
export type RouteBeforeLeaveCallback = () =>
  | Promise<boolean | void>
  | boolean
  | void;
// will enter //
// Called after the change is accepted but before the route becomes current.
export type RouteWillEnterCallback<
  TRouteMatch extends RouteMatch = RouteMatch,
> = (next: TRouteMatch['$next']) => Promise<void> | void;
// will update //
// Called before an accepted update takes effect.
export type RouteWillUpdateCallback<
  TRouteMatch extends RouteMatch = RouteMatch,
> = (
  next: TRouteMatch['$next'],
  data: RouteUpdateCallbackData,
) => Promise<void> | void;
// Options accepted when registering a will-update hook.
export interface RouteWillUpdateOptions {
  // When true, the hook also fires for updates of descendant routes.
  traceDescendants: boolean;
}
// A registered will-update hook: callback plus its registration options.
export interface RouteWillUpdateEntry<
  TRouteMatch extends RouteMatch = RouteMatch,
> {
  callback: RouteWillUpdateCallback<TRouteMatch>;
  options?: RouteWillUpdateOptions;
}
// will leave //
export type RouteWillLeaveCallback = () => Promise<void> | void;
// after enter //
export type RouteAfterEnterCallback = () => void;
// after update //
/**
 * Route after update callback.
 */
export type RouteAfterUpdateCallback = (data: RouteUpdateCallbackData) => void;
// Options accepted when registering an after-update hook.
export interface RouteAfterUpdateOptions {
  // When true, the hook also fires for updates of descendant routes.
  traceDescendants: boolean;
}
// A registered after-update hook: callback plus its registration options.
export interface RouteAfterUpdateEntry {
  options?: RouteAfterUpdateOptions;
  callback: RouteAfterUpdateCallback;
}
// after leave //
export type RouteAfterLeaveCallback = () => void;
// reactive //
// MobX disposer returned by autorun/reaction registrations.
export type RouteReactiveDisposer = IReactionDisposer;
export type RouteAutorunView = (reaction: IReactionPublic) => void;
export type RouteAutorunOptions = IAutorunOptions;
// Bookkeeping record for a route-scoped MobX `autorun`.
interface RouteAutorunEntry {
  type: 'autorun';
  view: RouteAutorunView;
  options: RouteAutorunOptions | undefined;
  // Set while the autorun is active; undefined when not yet started/disposed.
  disposer: RouteReactiveDisposer | undefined;
}
export type RouteReactionExpression<T> = (reaction: IReactionPublic) => T;
export type RouteReactionEffect<T> = (
  value: T,
  previousValue: T,
  reaction: IReactionPublic,
) => void;
export type RouteReactionOptions<T> = IReactionOptions<T> | undefined;
// Bookkeeping record for a route-scoped MobX `reaction`.
interface RouteReactionEntry<T = unknown> {
  type: 'reaction';
  expression: RouteReactionExpression<T>;
  effect: RouteReactionEffect<T>;
  options: RouteReactionOptions<T> | undefined;
  // Set while the reaction is active; undefined when not yet started/disposed.
  disposer: RouteReactiveDisposer | undefined;
}
// removal //
// Callback returned by hook registration to unregister the hook.
export type RouteHookRemovalCallback = () => void;
// enter or update combination //
export type RouteBeforeEnterOrUpdateCallback<
  TRouteMatch extends RouteMatch = RouteMatch,
> = (next: TRouteMatch['$next']) => Promise<boolean | void> | boolean | void;
export type RouteWillEnterOrUpdateCallback<
  TRouteMatch extends RouteMatch = RouteMatch,
> = (next: TRouteMatch['$next']) => Promise<void> | void;
export type RouteAfterEnterOrUpdateCallback = () => void;
///
// Factory producing a route service (lifecycle hooks + extension members),
// possibly asynchronously.
export type RouteServiceFactory<TRouteMatch extends RouteMatch> = (
  match: TRouteMatch,
) => IRouteService<TRouteMatch> | Promise<IRouteService<TRouteMatch>>;
// A route service: optional lifecycle hooks plus arbitrary extension members.
export type IRouteService<TRouteMatch extends RouteMatch = RouteMatch> = {
  beforeEnter?: RouteBeforeEnterCallback<TRouteMatch>;
  willEnter?: RouteWillEnterCallback;
  afterEnter?: RouteAfterEnterCallback;
  beforeUpdate?: RouteBeforeUpdateCallback<TRouteMatch>;
  willUpdate?: RouteWillUpdateCallback<TRouteMatch>;
  afterUpdate?: RouteAfterUpdateCallback;
  beforeLeave?: RouteBeforeLeaveCallback;
  willLeave?: RouteWillLeaveCallback;
  afterLeave?: RouteAfterLeaveCallback;
} & RouteServiceExtension<TRouteMatch>;
// The members a route adds on top of the base RouteMatch interface.
export type RouteServiceExtension<TRouteMatch extends RouteMatch> =
  OmitValueWithType<
    OmitValueOfKey<TRouteMatch, keyof RouteMatch>,
    RouteMatch,
    false
  >;
// Union of the two reactive bookkeeping records above.
type RouteReactiveEntry = RouteAutorunEntry | RouteReactionEntry;
// Result of matching one path segment against a route pattern.
interface RouteMatchInternalResult {
  matched: boolean;
  exactlyMatched: boolean;
  segment: string | undefined;
  rest: string;
}
// Parallel whitelist for a route: other groups/matches allowed alongside it.
export interface RouteMatchParallelOptions<TGroupName extends string> {
  groups?: TGroupName[];
  matches?: RouteMatch[];
}
/** @internal */
export interface RouteMatchUpdateResult {
  pathSegmentDict: GeneralSegmentDict;
  paramSegmentDict: GeneralSegmentDict;
}
// One matched route within a group, with the segment it consumed and the rest.
export interface RouteMatchEntry {
  match: RouteMatch;
  exact: boolean;
  segment: string;
  rest: string;
}
// A query-string entry tracked with the id of the match that owns it.
export interface RouteSourceQuery {
  id: string | symbol | true;
  value: string;
}
// Observable source of truth shared by all matches of a router.
export interface RouteSource {
  // group name (undefined = primary) -> match -> its entry
  groupToMatchToMatchEntryMapMap: Map<
    string | undefined,
    Map<RouteMatch, RouteMatchEntry>
  >;
  queryMap: Map<string, RouteSourceQuery>;
  // group name (undefined = primary) -> current path
  pathMap: Map<string | undefined, string>;
}
// Options for constructing a RouteMatch, extending the shared options with
// exact-match behavior and user-supplied metadata.
export interface RouteMatchOptions extends RouteMatchSharedOptions {
  // true = require exact match; a string specifies the default rest segment.
  exact: boolean | string;
  metadata: object | undefined;
}
/**
 * A node in the route tree. Holds the registered lifecycle hooks
 * (before/will/after × enter/update/leave), reactive entries created with
 * `$autorun`/`$reaction`, and an optional lazily-created service. The internal
 * `_before*`/`_will*`/`_after*` methods are driven by the router during
 * navigation and dispatch to both plain callbacks and the service's hooks.
 */
export class RouteMatch<
  TParamDict extends GeneralParamDict = GeneralParamDict,
  TNextRouteMatch extends NextRouteMatch<TParamDict> = NextRouteMatch<TParamDict>,
  TSpecificGroupName extends string | undefined = string | undefined,
  TGroupName extends string = string,
  TMetadata extends object = object,
> extends RouteMatchShared<TParamDict, TSpecificGroupName, TGroupName> {
  declare readonly $parent: RouteMatch | undefined;
  // Counterpart match describing the upcoming state during a transition.
  readonly $next!: TNextRouteMatch;
  // Metadata merged from the parent's metadata and this match's own options.
  readonly $metadata: TMetadata;
  /** @internal */
  private _beforeEnterCallbackSet = new Set<RouteBeforeEnterCallback>();
  /** @internal */
  private _beforeUpdateEntrySet = new Set<RouteBeforeUpdateEntry>();
  /** @internal */
  private _beforeLeaveCallbackSet = new Set<RouteBeforeLeaveCallback>();
  /** @internal */
  private _willEnterCallbackSet = new Set<RouteWillEnterCallback>();
  /** @internal */
  private _willUpdateEntrySet = new Set<RouteWillUpdateEntry>();
  /** @internal */
  private _willLeaveCallbackSet = new Set<RouteWillLeaveCallback>();
  /** @internal */
  private _afterEnterCallbackSet = new Set<RouteAfterEnterCallback>();
  /** @internal */
  private _afterUpdateEntrySet = new Set<RouteAfterUpdateEntry>();
  /** @internal */
  private _afterLeaveCallbackSet = new Set<RouteAfterLeaveCallback>();
  /** @internal */
  // Autorun/reaction entries; disposers are created on enter and disposed on
  // leave (see _afterEnter / _willLeave).
  private _reactiveEntrySet = new Set<RouteReactiveEntry>();
  /** @internal */
  // Observable so extension getters re-evaluate when the service resolves.
  @observable
  private _service: IRouteService | undefined;
  /** @internal */
  private _servicePromise: Promise<IRouteService | undefined> | undefined;
  /** @internal */
  private _serviceFactory: RouteServiceFactory<any> | undefined;
  /** @internal */
  // See RouteMatchOptions.exact: string means "default rest path on exact match".
  private _allowExact: boolean | string;
  /** @internal */
  _parallel: RouteMatchParallelOptions<TGroupName> | undefined;
  constructor(
    name: string,
    router: Router<TGroupName>,
    source: RouteSource,
    parent: RouteMatch | undefined,
    extension: object | undefined,
    history: IHistory,
    {exact, metadata, ...sharedOptions}: RouteMatchOptions,
  ) {
    super(name, router, source, parent, history, sharedOptions);
    if (extension) {
      // Expose each extension key as a getter that prefers the service's
      // value while matched, falling back to the static extension value.
      for (let key of Object.keys(extension)) {
        Object.defineProperty(this, key, {
          get(this: RouteMatch) {
            let service = this.$matched ? this._service : undefined;
            return service && key in (service as any)
              ? (service as any)[key]
              : (extension as any)[key];
          },
        });
      }
    }
    // Child metadata shadows parent metadata on key collisions.
    this.$metadata = {
      ...parent?.$metadata,
      ...metadata,
    } as TMetadata;
    this._allowExact = exact;
  }
  /** Register a before-enter hook; returns a removal callback. */
  $beforeEnter(
    callback: RouteBeforeEnterCallback<this>,
  ): RouteHookRemovalCallback {
    this._beforeEnterCallbackSet.add(callback);
    return () => {
      this._beforeEnterCallbackSet.delete(callback);
    };
  }
  /** Register a before-update hook; returns a removal callback. */
  $beforeUpdate(
    callback: RouteBeforeUpdateCallback<this>,
    options?: RouteBeforeUpdateOptions,
  ): RouteHookRemovalCallback {
    let entry: RouteBeforeUpdateEntry<this> = {
      callback,
      options,
    };
    this._beforeUpdateEntrySet.add(entry);
    return () => {
      this._beforeUpdateEntrySet.delete(entry);
    };
  }
  /** Register a before-leave hook; returns a removal callback. */
  $beforeLeave(callback: RouteBeforeLeaveCallback): RouteHookRemovalCallback {
    this._beforeLeaveCallbackSet.add(callback);
    return () => {
      this._beforeLeaveCallbackSet.delete(callback);
    };
  }
  /** Register a will-enter hook; returns a removal callback. */
  $willEnter(callback: RouteWillEnterCallback): RouteHookRemovalCallback {
    this._willEnterCallbackSet.add(callback);
    return () => {
      this._willEnterCallbackSet.delete(callback);
    };
  }
  /** Register a will-update hook; returns a removal callback. */
  $willUpdate(
    callback: RouteWillUpdateCallback,
    options?: RouteWillUpdateOptions,
  ): RouteHookRemovalCallback {
    let willUpdateEntry: RouteWillUpdateEntry = {
      callback,
      options,
    };
    this._willUpdateEntrySet.add(willUpdateEntry);
    return () => {
      this._willUpdateEntrySet.delete(willUpdateEntry);
    };
  }
  /** Register a will-leave hook; returns a removal callback. */
  $willLeave(callback: RouteWillLeaveCallback): RouteHookRemovalCallback {
    this._willLeaveCallbackSet.add(callback);
    return () => {
      this._willLeaveCallbackSet.delete(callback);
    };
  }
  /** Register an after-enter hook; returns a removal callback. */
  $afterEnter(callback: RouteAfterEnterCallback): RouteHookRemovalCallback {
    this._afterEnterCallbackSet.add(callback);
    return () => {
      this._afterEnterCallbackSet.delete(callback);
    };
  }
  /** Register an after-update hook; returns a removal callback. */
  $afterUpdate(
    callback: RouteAfterUpdateCallback,
    options?: RouteAfterUpdateOptions,
  ): RouteHookRemovalCallback {
    let afterUpdateEntry: RouteAfterUpdateEntry = {
      callback,
      options,
    };
    this._afterUpdateEntrySet.add(afterUpdateEntry);
    return () => {
      this._afterUpdateEntrySet.delete(afterUpdateEntry);
    };
  }
  /** Register an after-leave hook; returns a removal callback. */
  $afterLeave(callback: RouteAfterLeaveCallback): RouteHookRemovalCallback {
    this._afterLeaveCallbackSet.add(callback);
    return () => {
      this._afterLeaveCallbackSet.delete(callback);
    };
  }
  /**
   * Register a MobX autorun tied to this route's lifetime: started immediately
   * if already matched, restarted on enter, disposed on leave.
   * Returns a removal callback.
   */
  $autorun(
    view: RouteAutorunView,
    options?: RouteAutorunOptions,
  ): RouteHookRemovalCallback {
    let autorunEntry: RouteAutorunEntry = {
      type: 'autorun',
      view,
      options,
      disposer: undefined,
    };
    this._reactiveEntrySet.add(autorunEntry);
    if (this.$matched) {
      // tolerate: swallow (report) errors thrown by user code.
      tolerate(() => {
        autorunEntry.disposer = autorun(view, options);
      });
    }
    return () => {
      if (autorunEntry.disposer) {
        autorunEntry.disposer();
        autorunEntry.disposer = undefined;
      }
      this._reactiveEntrySet.delete(autorunEntry);
    };
  }
  /**
   * Register a MobX reaction tied to this route's lifetime; same lifecycle
   * handling as `$autorun`. Returns a removal callback.
   */
  $reaction<T>(
    expression: RouteReactionExpression<T>,
    effect: RouteReactionEffect<T>,
    options?: RouteReactionOptions<T>,
  ): RouteHookRemovalCallback {
    let reactionEntry: RouteReactionEntry = {
      type: 'reaction',
      expression,
      effect,
      options,
      disposer: undefined,
    };
    this._reactiveEntrySet.add(reactionEntry);
    if (this.$matched) {
      tolerate(() => {
        reactionEntry.disposer = reaction(expression, effect, options);
      });
    }
    return () => {
      if (reactionEntry.disposer) {
        reactionEntry.disposer();
        reactionEntry.disposer = undefined;
      }
      this._reactiveEntrySet.delete(reactionEntry);
    };
  }
  /** Register one callback for both before-enter and before-update. */
  $beforeEnterOrUpdate(
    callback: RouteBeforeEnterOrUpdateCallback<this>,
    beforeUpdateOptions?: RouteBeforeUpdateOptions,
  ): RouteHookRemovalCallback {
    let beforeUpdateEntry: RouteBeforeUpdateEntry<this> = {
      callback,
      options: beforeUpdateOptions,
    };
    this._beforeEnterCallbackSet.add(callback);
    this._beforeUpdateEntrySet.add(beforeUpdateEntry);
    return () => {
      this._beforeEnterCallbackSet.delete(callback);
      this._beforeUpdateEntrySet.delete(beforeUpdateEntry);
    };
  }
  /** Register one callback for both will-enter and will-update. */
  $willEnterOrUpdate(
    callback: RouteWillEnterOrUpdateCallback<this>,
    willUpdateOptions?: RouteWillUpdateOptions,
  ): RouteHookRemovalCallback {
    let willUpdateEntry: RouteWillUpdateEntry<this> = {
      callback,
      options: willUpdateOptions,
    };
    this._willEnterCallbackSet.add(callback);
    this._willUpdateEntrySet.add(willUpdateEntry);
    return () => {
      this._willEnterCallbackSet.delete(callback);
      this._willUpdateEntrySet.delete(willUpdateEntry);
    };
  }
  /** Register one callback for both after-enter and after-update. */
  $afterEnterOrUpdate(
    callback: RouteAfterEnterOrUpdateCallback,
    afterUpdateOptions?: RouteAfterUpdateOptions,
  ): RouteHookRemovalCallback {
    let afterUpdateEntry: RouteAfterUpdateEntry = {
      callback,
      options: afterUpdateOptions,
    };
    this._afterEnterCallbackSet.add(callback);
    this._afterUpdateEntrySet.add(afterUpdateEntry);
    return () => {
      this._afterEnterCallbackSet.delete(callback);
      this._afterUpdateEntrySet.delete(afterUpdateEntry);
    };
  }
  /**
   * Attach a service factory to this route. Only one service may be defined
   * per route; a second call throws.
   */
  $service(factory: RouteServiceFactory<this>): this {
    if (this._serviceFactory) {
      throw new Error(`Service has already been defined for "${this.$name}"`);
    }
    this._serviceFactory = factory;
    return this;
  }
  /**
   * Configure the parallel whitelist for this (primary-group) route. The
   * whitelist must be a subset of the parent's, and is propagated top-down to
   * children.
   */
  $parallel(options: RouteMatchParallelOptions<TGroupName>): void {
    if (this.$group) {
      throw new Error('Parallel whitelist can only be set on primary routes');
    }
    let {groups = [], matches = []} = options;
    let parent = this.$parent;
    if (parent instanceof RouteMatch && parent._parallel) {
      let {groups: parentGroups = [], matches: parentMatches = []} =
        parent._parallel;
      let parentGroupSet = new Set(parentGroups);
      let parentMatchSet = new Set(parentMatches);
      let groupsBeingSubsetOfParents = groups.every(group =>
        parentGroupSet.has(group),
      );
      if (!groupsBeingSubsetOfParents) {
        throw new Error(
          "Parallel group can only be a subset of its parent's groups",
        );
      }
      // A match is allowed if its group is whitelisted by the parent, or if
      // it (or any ancestor) appears in the parent's match whitelist.
      let matchesBeingSubsetOfParents = matches.every(match => {
        if (
          typeof match.$group === 'string' &&
          parentGroupSet.has(match.$group)
        ) {
          return true;
        }
        let current: RouteMatch | undefined = match;
        while (current) {
          if (parentMatchSet.has(current)) {
            return true;
          }
          current = current.$parent;
        }
        return false;
      });
      if (!matchesBeingSubsetOfParents) {
        throw new Error(
          "Parallel match can only be a subset of its parent's matches",
        );
      }
    }
    this._parallel = options;
    let children = this._children || [];
    for (let child of children) {
      // Children must not have independently-set parallel options; they may
      // only inherit from an ancestor.
      if (
        child._parallel &&
        (!parent || parent._parallel !== child._parallel)
      ) {
        throw new Error(
          'Parallel options can only be specified in a top-down fashion',
        );
      }
      child.$parallel(options);
    }
  }
  /** @internal */
  // Try to consume a segment from `upperRest` using this route's pattern
  // (string prefix or RegExp). Returns the matched segment and remaining path.
  _match(upperRest: string): RouteMatchInternalResult {
    let pattern = this._matchPattern;
    let segment: string | undefined;
    let rest: string;
    if (typeof pattern === 'string') {
      if (testPathPrefix(upperRest, pattern)) {
        segment = pattern;
        rest = upperRest.slice(pattern.length);
        if (rest.startsWith('/')) {
          rest = rest.slice(1);
        }
      } else {
        segment = undefined;
        rest = '';
      }
    } else {
      let groups = pattern.exec(upperRest);
      if (groups) {
        let matched = groups[0];
        // The regex match must also be a valid path prefix (i.e. end at a
        // segment boundary), otherwise it does not count.
        if (testPathPrefix(upperRest, matched)) {
          segment = matched;
          rest = upperRest.slice(matched.length);
          if (rest.startsWith('/')) {
            rest = rest.slice(1);
          }
        } else {
          segment = undefined;
          rest = '';
        }
      } else {
        segment = undefined;
        rest = '';
      }
    }
    let matched = segment !== undefined;
    let exactlyMatched = matched && rest === '';
    if (exactlyMatched) {
      let allowExact = this._allowExact;
      if (typeof allowExact === 'string') {
        // Specify a default rest path on an exact match.
        rest = allowExact;
      } else if (this._children && !allowExact) {
        // If this route has children and does not allow exact match, then this
        // match is invalid and reset `matched` and `exactlyMatched` to false.
        matched = false;
        exactlyMatched = false;
      }
    }
    return {
      matched,
      exactlyMatched,
      segment,
      rest,
    };
  }
  /** @internal */
  // Run before-leave callbacks and the service hook concurrently; any `false`
  // result vetoes the navigation.
  async _beforeLeave(): Promise<boolean> {
    let results = await Promise.all([
      ...Array.from(this._beforeLeaveCallbackSet).map(callback =>
        tolerate(callback),
      ),
      (async () => {
        let service = await this._getService();
        if (service && service.beforeLeave) {
          return tolerate(() => service!.beforeLeave!());
        }
      })(),
    ]);
    return !results.some(result => result === false);
  }
  /** @internal */
  // Run before-enter callbacks and the service hook; `false` vetoes.
  async _beforeEnter(): Promise<boolean> {
    let next = this.$next;
    let results = await Promise.all([
      ...Array.from(this._beforeEnterCallbackSet).map(callback =>
        tolerate(callback, next),
      ),
      (async () => {
        let service = await this._getService();
        if (service && service.beforeEnter) {
          return tolerate(() => service!.beforeEnter!(next));
        }
      })(),
    ]);
    return !results.some(result => result === false);
  }
  /** @internal */
  // Run before-update callbacks and the service hook; entries only see
  // descendant-triggered updates if they opted in via `traceDescendants`.
  async _beforeUpdate(
    triggeredByDescendants: boolean,
  ): Promise<boolean | RouteMatch> {
    let next = this.$next;
    let results = await Promise.all([
      ...Array.from(this._beforeUpdateEntrySet)
        .filter(({options}) =>
          triggeredByDescendants ? options && options.traceDescendants : true,
        )
        .map(({callback}) =>
          tolerate(callback, next, {descendants: triggeredByDescendants}),
        ),
      (async () => {
        let service = await this._getService();
        if (service && service.beforeUpdate) {
          return tolerate(() =>
            service!.beforeUpdate!(next, {descendants: triggeredByDescendants}),
          );
        }
      })(),
    ]);
    return !results.some(result => result === false);
  }
  /** @internal */
  // Dispose active autoruns/reactions, then run will-leave callbacks and the
  // service hook.
  async _willLeave(): Promise<void> {
    for (let reactiveEntry of this._reactiveEntrySet) {
      if (reactiveEntry.disposer) {
        reactiveEntry.disposer();
        reactiveEntry.disposer = undefined;
      }
    }
    await Promise.all([
      ...Array.from(this._willLeaveCallbackSet).map(callback =>
        tolerate(callback),
      ),
      (async () => {
        let service = await this._getService();
        if (service && service.willLeave) {
          return tolerate(() => service!.willLeave!());
        }
      })(),
    ]);
  }
  /** @internal */
  // Run will-enter callbacks and the service hook concurrently.
  async _willEnter(): Promise<void> {
    let next = this.$next;
    await Promise.all([
      ...Array.from(this._willEnterCallbackSet).map(callback =>
        tolerate(callback, next),
      ),
      (async () => {
        let service = await this._getService();
        if (service && service.willEnter) {
          return tolerate(() => service!.willEnter!(next));
        }
      })(),
    ]);
  }
  /** @internal */
  // Run will-update callbacks (honoring `traceDescendants`) and the service
  // hook concurrently.
  async _willUpdate(triggeredByDescendants: boolean): Promise<void> {
    let next = this.$next;
    await Promise.all([
      ...Array.from(this._willUpdateEntrySet)
        .filter(({options}) =>
          triggeredByDescendants ? options && options.traceDescendants : true,
        )
        .map(({callback}) =>
          tolerate(callback, next, {descendants: triggeredByDescendants}),
        ),
      (async () => {
        let service = await this._getService();
        if (service && service.willUpdate) {
          return tolerate(() =>
            service!.willUpdate!(next, {descendants: triggeredByDescendants}),
          );
        }
      })(),
    ]);
  }
  /** @internal */
  // Run after-leave callbacks synchronously, then the service hook.
  async _afterLeave(): Promise<void> {
    for (let callback of this._afterLeaveCallbackSet) {
      tolerate(callback);
    }
    let service = await this._getService();
    if (service && service.afterLeave) {
      tolerate(() => service!.afterLeave!());
    }
  }
  /** @internal */
  // Run after-enter callbacks and the service hook, then (re)start all
  // registered autoruns/reactions.
  async _afterEnter(): Promise<void> {
    for (let callback of this._afterEnterCallbackSet) {
      tolerate(callback);
    }
    let service = await this._getService();
    if (service && service.afterEnter) {
      tolerate(() => service!.afterEnter!());
    }
    for (let reactiveEntry of this._reactiveEntrySet) {
      // Disposers should have been cleared in _willLeave; a lingering one
      // indicates a lifecycle bug, so dispose it and warn.
      if (reactiveEntry.disposer) {
        reactiveEntry.disposer();
        console.warn('Unexpected disposer during afterEnter phase.');
      }
      tolerate(() => {
        switch (reactiveEntry.type) {
          case 'autorun':
            reactiveEntry.disposer = autorun(
              reactiveEntry.view,
              reactiveEntry.options,
            );
            break;
          case 'reaction':
            reactiveEntry.disposer = reaction(
              reactiveEntry.expression,
              reactiveEntry.effect,
              reactiveEntry.options,
            );
            break;
        }
      });
    }
  }
  /** @internal */
  // Run after-update entries (honoring `traceDescendants`), then the service
  // hook.
  async _afterUpdate(triggeredByDescendants: boolean): Promise<void> {
    for (let {callback, options} of this._afterUpdateEntrySet) {
      if (triggeredByDescendants ? options && options.traceDescendants : true) {
        tolerate(callback, {descendants: triggeredByDescendants});
      }
    }
    let service = await this._getService();
    if (service && service.afterUpdate) {
      tolerate(() =>
        service!.afterUpdate!({descendants: triggeredByDescendants}),
      );
    }
  }
  /** @internal */
  // Look up this match's entry in the given source for this route's group.
  _getMatchEntry(source: RouteSource): RouteMatchEntry | undefined {
    let matchToMatchEntryMap = source.groupToMatchToMatchEntryMapMap.get(
      this.$group,
    );
    return matchToMatchEntryMap && matchToMatchEntryMap.get(this);
  }
  /** @internal */
  protected _getBuilder(): RouteBuilder {
    return this.$router.$current;
  }
  /** @internal */
  // Resolve the route service lazily; caches the resolved instance (and the
  // in-flight promise for async factories) so the factory runs at most once.
  private async _getService(): Promise<IRouteService | undefined> {
    let serviceOrServicePromise = this._service || this._servicePromise;
    if (serviceOrServicePromise) {
      return serviceOrServicePromise;
    }
    let factory = this._serviceFactory;
    if (!factory) {
      return undefined;
    }
    let output = tolerate(factory, this);
    if (output instanceof Promise) {
      return (this._servicePromise = output.then(service => {
        this._service = service;
        return service;
      }));
    } else {
      this._service = output;
      return output;
    }
  }
  // Default patterns: one path segment, or the entire rest of the path.
  static SEGMENT = /[^/]+/;
  static REST = /.*/;
}
import * as fs from 'fs-extra';
import * as path from 'path';
import { performance } from 'perf_hooks';
import { WorkspaceFolder } from 'vscode-languageserver';
import { URI } from 'vscode-uri';
import { CodeModDefinition, CodeModExports, CodeModScope } from '../codeModTypes';
import { noop } from '../utils/helpers';
import { requireDynamically } from '../utils/requireDynamically';
import astService, { LanguageId, Selection } from './astService';
import connectionService from './connectionService';
import logService from './logService';
/**
 * Normalize a raw codemod module export into a full CodeModDefinition,
 * supplying defaults for optional metadata (name falls back to the id,
 * description to an empty string, canRun to "always", scope to Global).
 */
function parseCodeMod(id: string, modFn: CodeModExports): CodeModDefinition {
    const { title, description, detail, canRun, scope, languageScope } = modFn;
    return {
        id,
        modFn,
        name: title || id,
        description: description || '',
        detail,
        canRun: canRun || (() => true),
        scope: (scope as CodeModScope) || CodeModScope.Global,
        languageScope,
    };
}
/**
 * Load the codemods bundled with the extension. Uses webpack's
 * `require.context` when bundled; falls back to scanning the `../codemods`
 * directory with fs when running under plain Node (e.g. Jest).
 */
function loadEmbeddedCodeMods() {
    // The logic is different for Webpack and Node environment. In production Webpack is used.
    let result: CodeModDefinition[];
    // eslint-disable-next-line @typescript-eslint/camelcase
    if (typeof __webpack_require__ === 'function') {
        // Webpack build: modules were statically bundled by require.context.
        let context = (require as any).context('../codemods', true, /\.ts$/);
        result = context.keys().map((k: any) => parseCodeMod(k, context(k)));
    } else {
        // Different loading code when running from Node (Jest)
        const embeddedCodeModDir = path.join(__dirname, '..', 'codemods');
        const files = fs.readdirSync(embeddedCodeModDir);
        const fileNames = files.map((name) => path.join(embeddedCodeModDir, name));
        result = fileNames
            .map((fileName) => {
                // Non-.ts/.js entries are marked as non-files so they get
                // filtered out without hitting lstat.
                if (!fileName.match(/(\.ts|\.js)$/)) {
                    return {
                        isFile: false,
                        fileName,
                    };
                }
                const stat = fs.lstatSync(fileName);
                return {
                    isFile: stat.isFile(),
                    fileName,
                };
            })
            .filter((x) => x.isFile)
            .map((x) => {
                const exp = requireDynamically(x.fileName);
                // Codemod id = file name without extension.
                const id = path.basename(x.fileName, path.extname(x.fileName));
                return parseCodeMod(id, exp);
            });
    }
    return result;
}
// Loaded once at module initialization; workspace codemods are added later.
const embeddedCodeMods = loadEmbeddedCodeMods();
/**
 * Central registry and executor for codemods: merges embedded codemods with
 * user-workspace codemods, caches the combined list, filters them by scope and
 * applicability for a given document/selection, and runs transforms.
 */
class CodeModService {
    // Combined embedded + workspace codemods; null until first load.
    private _codeModsCache: CodeModDefinition[] | null = null;
    /**
     * Rebuild the codemod cache: embedded codemods plus any found in each
     * workspace folder's configured codemod directory. Returns the sorted list.
     */
    public async reloadAllCodeMods(
        workspaceFolders?: WorkspaceFolder[]
    ): Promise<CodeModDefinition[]> {
        let codeMods = [...embeddedCodeMods];
        // user-workspace code mods
        const connection = connectionService.connection();
        if (process.env.NODE_ENV !== 'test') {
            logService.output(`Running in ${process.env.NODE_ENV} mode.`);
        }
        if (connection) {
            // Connection is not established when running under Jest
            const wsFolders = await connection.workspace.getWorkspaceFolders();
            if (wsFolders) {
                const codemodDir = connectionService.getSettings().codemodDir;
                for (let folder of wsFolders!) {
                    const folderUri = URI.parse(folder.uri);
                    // Only local folders can be scanned for codemod files.
                    if (folderUri.scheme !== 'file') {
                        continue;
                    }
                    const dirName = path.join(folderUri.fsPath, codemodDir);
                    if (!(await fs.pathExists(dirName))) {
                        continue;
                    }
                    const names = await fs.readdir(dirName);
                    for (let n of names) {
                        const fn = path.join(dirName, n);
                        if ((await fs.stat(fn)).isFile()) {
                            let modFn: CodeModExports | null = null;
                            try {
                                modFn = requireDynamically(fn);
                            } catch (e) {
                                // A broken user codemod is reported but does
                                // not abort loading the rest.
                                logService.outputError(
                                    `Failed to parse codemod '${fn}': ${e.message}`
                                );
                            }
                            if (modFn) {
                                const id = path.basename(fn, path.extname(fn));
                                codeMods.push(parseCodeMod(id, modFn));
                            }
                        }
                    }
                }
            }
        }
        const validCodeMods = codeMods.filter((c) => c) as CodeModDefinition[];
        // Case-insensitive sort by display name for stable menu ordering.
        validCodeMods.sort((a, b) => a.name.toLowerCase().localeCompare(b.name.toLowerCase()));
        this._codeModsCache = validCodeMods;
        if (process.env.NODE_ENV !== 'test') {
            logService.output(`${validCodeMods.length} code actions loaded.`);
        }
        return validCodeMods;
    }
    /**
     * Executable codemods with Global scope (those that do not depend on the
     * cursor position). Errors are logged and yield an empty list.
     */
    public async getExecutableGlobalCodeMods(options: {
        languageId: LanguageId;
        fileName: string;
        source: string;
        selection: Selection;
    }) {
        try {
            const mods = await this.getAllExecutableCodeMods(options);
            return mods.filter((mod) => mod.scope === CodeModScope.Global);
        } catch (e) {
            logService.outputError(
                `Error while executing [getGlobalMods].getRunnableCodeMods(): ${e.toString()}`
            );
            return [];
        }
    }
    /**
     * Returns code mods available under the cursor. Global code mods are excluded from the list.
     */
    public async getExecutableCodeModsUnderCursor(options: {
        languageId: LanguageId;
        fileName: string;
        source: string;
        selection: Selection;
    }) {
        try {
            const verbose = logService.getLogLevel() === 'verbose';
            let startMs: number;
            if (verbose) {
                startMs = performance.now();
            }
            const mods = await this.getAllExecutableCodeMods(options);
            const result = mods.filter((mod) => mod.scope === CodeModScope.Cursor);
            if (verbose) {
                const endMs = performance.now();
                // Round timing to 3 decimal places (milliseconds).
                logService.output(
                    `Computed executable code actions in ${
                        Math.round((endMs - startMs!) * 1000) / 1000
                    }ms.`,
                    'verbose'
                );
            }
            return result;
        } catch (e) {
            logService.outputError(
                `Error while executing [getCodeActionMods].getRunnableCodeMods(): ${e.toString()}`
            );
            return [];
        }
    }
    /**
     * Returns all code mods (any scope) which are currently available given the source code.
     *
     * Global code mods do not analyse the cursor.
     *
     * Cursor code mods take the position of the cursor into account.
     */
    public async getAllExecutableCodeMods(options: {
        languageId: LanguageId;
        fileName: string;
        source: string;
        selection: Selection;
        include?: string[];
        exclude?: string[];
    }): Promise<CodeModDefinition[]> {
        let mods = await this._getAllCodeMods();
        if (options.include) {
            // `include` is an explicit allowlist; an unknown id is an error.
            mods = options.include!.map((id) => {
                const mod = mods.find((m) => m.id === id);
                if (!mod) {
                    throw new Error(`Mod ${id} not loaded.`);
                }
                return mod;
            });
        }
        if (options.exclude) {
            mods = mods.filter((m) => !options.exclude!.includes(m.id));
        }
        const jscodeshift = astService.getCodeShift(options.languageId);
        const ast = astService.getAstTree(options);
        if (!ast) {
            // Unparsable source: no codemods apply.
            return [];
        }
        const target = ast.findNodeAtPosition(options.selection.active);
        let anchorTarget = target;
        if (options.selection.active !== options.selection.anchor) {
            anchorTarget = ast.findNodeAtPosition(options.selection.anchor);
        }
        return mods.filter((m) => {
            try {
                const r = m.canRun(
                    {
                        path: options.fileName,
                        source: options.source,
                        languageId: options.languageId,
                        ast,
                    },
                    {
                        jscodeshift,
                        stats: noop,
                    },
                    {
                        target,
                        anchorTarget,
                        selection: options.selection,
                    }
                );
                return r;
            } catch (e) {
                // A throwing canRun() disables that mod but not the others.
                logService.outputError(
                    `Error while executing [${m.id}].canRun()\nAction name: ${
                        m.name
                    }\n${e.toString()}`
                );
                return false;
            }
        });
    }
    /**
     * Run the transform of the codemod identified by `modId` over the given
     * document. Returns the (possibly unchanged) source and an optional new
     * selection. Throws on syntax errors or unknown mod ids.
     */
    public executeTransform(
        modId: string,
        options: {
            languageId: LanguageId;
            fileName: string;
            source: string;
            selection: Selection;
        }
    ): {
        source: string;
        selection?: Selection;
    } {
        const verbose = logService.getLogLevel() === 'verbose';
        let startMs: number;
        if (verbose) {
            startMs = performance.now();
        }
        const mod = this._getCodeMod(modId);
        const jscodeshift = astService.getCodeShift(options.languageId);
        const ast = astService.getAstTree(options);
        if (!ast) {
            throw new Error('Syntax error');
        }
        const target = ast.findNodeAtPosition(options.selection.active);
        let anchorTarget = target;
        if (options.selection.active !== options.selection.anchor) {
            anchorTarget = ast.findNodeAtPosition(options.selection.anchor);
        }
        let result;
        result = mod.modFn(
            {
                path: options.fileName,
                source: options.source,
                languageId: options.languageId,
                ast,
            },
            {
                jscodeshift,
                stats: noop,
            },
            {
                target,
                anchorTarget,
                selection: options.selection,
            }
        );
        // The transform may have mutated the AST; drop the cached tree.
        astService.invalidateAstTree(options.fileName);
        if (verbose) {
            const endMs = performance.now();
            logService.output(
                `Executed code action "${mod.name}" in ${
                    Math.round((endMs - startMs!) * 1000) / 1000
                }ms.`,
                'verbose'
            );
        }
        // A falsy result means "no change"; a string is the new source;
        // an object carries both new source and new selection.
        if (!result) {
            return {
                source: options.source,
            };
        }
        if (typeof result === 'string') {
            return {
                source: result,
            };
        } else {
            return {
                source: result.source,
                selection: result.selection,
            };
        }
    }
    // Return the cached list, loading it on first use.
    private async _getAllCodeMods(): Promise<CodeModDefinition[]> {
        if (this._codeModsCache) {
            return this._codeModsCache;
        }
        await this.reloadAllCodeMods();
        return this._codeModsCache!;
    }
    // Look up a cached codemod by id; throws if the cache has no such mod.
    private _getCodeMod(modId: string): CodeModDefinition {
        const mod = this._codeModsCache && this._codeModsCache.find((m) => m.id === modId);
        if (!mod) {
            throw new Error(`The requested mod ${modId} is missing in cache.`);
        }
        return mod;
    }
}
// Singleton instance shared across the language server.
export default new CodeModService();
import * as d3Scale from 'd3-scale';
import { interpolateCubehelix } from 'd3-interpolate';
import { timeHour } from 'd3-time';
import { schemePuRd } from 'd3-scale-chromatic';
// -------------------------------------------------------------------------------
// Preparatory Steps
// -------------------------------------------------------------------------------
// Test helper: an object that coerces to a number via valueOf(), used to
// exercise d3-scale's acceptance of number-coercible domain values.
class NumCoercible {
    a: number;

    constructor(value: number) {
        this.a = value;
    }

    // Enables implicit numeric coercion (e.g. `+obj`, arithmetic contexts).
    valueOf(): number {
        return this.a;
    }
}
// Test helper: an object that coerces to a string via toString(), used to
// exercise d3-scale's acceptance of string-coercible values.
class StringCoercible {
    txt: string;

    constructor(value: string) {
        this.txt = value;
    }

    // Enables implicit string coercion (e.g. template literals, concatenation).
    toString(): string {
        return this.txt;
    }
}
let num: number;
let date: Date;
let clampFlag: boolean;
let outputNumber: number;
let outputString: string;
let domainNumbers: number[] = [1, 100];
const domainNumeric: NumCoercible[] = [new NumCoercible(0), new NumCoercible(100)];
let domainStrings: string[];
let domainDates: Date[] = [new Date(2016, 0, 15), new Date(2016, 5, 15)];
let ticksNumbers: number[];
let ticksDates: Date[];
let tickFormatNumberFn: ((d: number | { valueOf(): number }) => string);
let tickFormatDateFn: ((d: Date) => string);
let rangeNumbers: number[] = [2, 200];
let rangeStrings: string[] = ['2px', '200px'];
let numExtent: [number, number];
let numOrUndefinedExtent: [number | undefined, number | undefined];
let outputNumberMaybe: number | undefined;
// -------------------------------------------------------------------------------
// Linear Scale Factory
// -------------------------------------------------------------------------------
// scaleLinear() -----------------------------------------------------------------
let linearScaleNumber: d3Scale.ScaleLinear<number, number>;
let linearScaleString: d3Scale.ScaleLinear<string, string>;
let linearScaleNumString: d3Scale.ScaleLinear<number, string>;
linearScaleNumber = d3Scale.scaleLinear();
linearScaleString = d3Scale.scaleLinear<string>();
linearScaleNumString = d3Scale.scaleLinear<number, string>();
// ScaleLinear Interface ========================================================
// domain(...) -----------------------------------------------------------------
linearScaleNumber = linearScaleNumber.domain(domainNumeric);
linearScaleNumber = linearScaleNumber.domain(domainNumbers);
domainNumbers = linearScaleNumber.domain();
linearScaleString = linearScaleString.domain(domainNumeric);
linearScaleString = linearScaleString.domain([10, 100]);
domainNumbers = linearScaleString.domain();
linearScaleNumString = linearScaleNumString.domain(domainNumeric);
linearScaleNumString = linearScaleNumString.domain(domainNumbers);
domainNumbers = linearScaleNumString.domain();
// range(...) -----------------------------------------------------------------
linearScaleNumber = linearScaleNumber.range(rangeNumbers);
rangeNumbers = linearScaleNumber.range();
linearScaleString = linearScaleString.range(['steelblue', 'brown']);
rangeStrings = linearScaleString.range();
linearScaleNumString = linearScaleNumString.range(rangeNumbers);
rangeNumbers = linearScaleNumString.range();
// invert(...) -----------------------------------------------------------------
num = linearScaleNumber.invert(500); // has number range, so inversion is possible
num = linearScaleNumber.invert(new NumCoercible(500)); // has number range, so inversion is possible
num = linearScaleNumString.invert(500); // has number range, so inversion is possible
num = linearScaleNumString.invert(new NumCoercible(500)); // has number range, so inversion is possible
// rangeRound(...) -----------------------------------------------------------------
linearScaleNumber = linearScaleNumber.rangeRound(rangeNumbers);
// clamp(...) -----------------------------------------------------------------
linearScaleNumber = linearScaleNumber.clamp(true);
clampFlag = linearScaleNumber.clamp();
// interpolate(...) -----------------------------------------------------------------
linearScaleString = linearScaleString.interpolate(interpolateCubehelix.gamma(3));
linearScaleNumString = linearScaleNumString.interpolate((a, b) => {
// take two numbers
return (t: number) => (a * (1 - t) + b * t) + 'px'; // a and b are numbers based on Range Type, return value of interpolator is string based on Output type
});
// Changes scale output type (inferred generic)
linearScaleNumString = linearScaleNumber.interpolate((a, b) => {
// take two numbers
return (t: number) => (a * (1 - t) + b * t) + 'px'; // a and b are numbers based on Range Type, return value of interpolator is string based on Output type
});
// nice(...) -----------------------------------------------------------------------
// chainable
linearScaleNumber = linearScaleNumber.nice();
linearScaleNumber = linearScaleNumber.nice(5);
// ticks(...) -----------------------------------------------------------------
ticksNumbers = linearScaleNumber.ticks();
ticksNumbers = linearScaleNumber.ticks(5);
// tickFormat(...) -----------------------------------------------------------------
tickFormatNumberFn = linearScaleNumber.tickFormat();
tickFormatNumberFn = linearScaleNumber.tickFormat(5);
tickFormatNumberFn = linearScaleNumber.tickFormat(5, '+%');
// (...) value mapping from domain to output -----------------------------------
outputNumber = linearScaleNumber(10);
outputString = linearScaleString(10);
outputString = linearScaleNumString(10);
// copy(...) -----------------------------------------------------------------
const copiedLinearScale: d3Scale.ScaleLinear<number, string> = linearScaleNumString.copy();
// -------------------------------------------------------------------------------
// Power Scale Factories
// -------------------------------------------------------------------------------
// scalePow() and scaleSqrt() ----------------------------------------------------
let powerScaleNumber: d3Scale.ScalePower<number, number>;
let powerScaleString: d3Scale.ScalePower<string, string>;
let powerScaleNumString: d3Scale.ScalePower<number, string>;
powerScaleNumber = d3Scale.scalePow();
powerScaleString = d3Scale.scalePow<string>();
powerScaleNumString = d3Scale.scalePow<number, string>();
let squarerootScaleNumber: d3Scale.ScalePower<number, number>;
let squarerootScaleString: d3Scale.ScalePower<string, string>;
let squarerootScaleNumString: d3Scale.ScalePower<number, string>;
squarerootScaleNumber = d3Scale.scaleSqrt();
squarerootScaleString = d3Scale.scaleSqrt<string>();
squarerootScaleNumString = d3Scale.scaleSqrt<number, string>();
// ScalePower Interface ========================================================
// exponent --------------------------------------------------------------------
const exponent: number = squarerootScaleNumber.exponent();
powerScaleNumber = powerScaleNumber.exponent(5);
// domain(...) -----------------------------------------------------------------
powerScaleNumber = powerScaleNumber.domain(domainNumeric);
powerScaleNumber = powerScaleNumber.domain(domainNumbers);
domainNumbers = powerScaleNumber.domain();
powerScaleString = powerScaleString.domain(domainNumeric);
powerScaleString = powerScaleString.domain([10, 100]);
domainNumbers = powerScaleString.domain();
powerScaleNumString = powerScaleNumString.domain(domainNumeric);
// NOTE(review): the statements below are compile-time type assertions for the
// d3-scale typings. Their exact expression forms (which overload is called,
// what the result is assigned to) ARE the test — do not refactor or "simplify".
powerScaleNumString = powerScaleNumString.domain(domainNumbers);
domainNumbers = powerScaleNumString.domain();
// range(...) -----------------------------------------------------------------
powerScaleNumber = powerScaleNumber.range(rangeNumbers);
rangeNumbers = powerScaleNumber.range();
powerScaleString = powerScaleString.range(['steelblue', 'brown']);
rangeStrings = powerScaleString.range();
powerScaleNumString = powerScaleNumString.range(rangeNumbers);
rangeNumbers = powerScaleNumString.range();
// invert(...) -----------------------------------------------------------------
num = powerScaleNumber.invert(500); // has number range, so inversion is possible
num = powerScaleNumber.invert(new NumCoercible(500)); // has number range, so inversion is possible
num = powerScaleNumString.invert(500); // has number range, so inversion is possible
num = powerScaleNumString.invert(new NumCoercible(500)); // has number range, so inversion is possible
// rangeRound(...) -----------------------------------------------------------------
powerScaleNumber = powerScaleNumber.rangeRound(rangeNumbers);
// clamp(...) -----------------------------------------------------------------
powerScaleNumber = powerScaleNumber.clamp(true);
clampFlag = powerScaleNumber.clamp();
// interpolate(...) -----------------------------------------------------------------
powerScaleString = powerScaleString.interpolate(interpolateCubehelix.gamma(3));
powerScaleNumString = powerScaleNumString.interpolate((a, b) => {
// take two numbers
return (t: number) => (a * (1 - t) + b * t) + 'px'; // a and b are numbers based on Range Type, return value of interpolator is string based on Output type
});
// nice(...) -----------------------------------------------------------------------
// chainable
powerScaleNumber = powerScaleNumber.nice();
powerScaleNumber = powerScaleNumber.nice(5);
// ticks(...) -----------------------------------------------------------------
ticksNumbers = powerScaleNumber.ticks();
ticksNumbers = powerScaleNumber.ticks(5);
// tickFormat(...) -----------------------------------------------------------------
tickFormatNumberFn = powerScaleNumber.tickFormat();
tickFormatNumberFn = powerScaleNumber.tickFormat(5);
tickFormatNumberFn = powerScaleNumber.tickFormat(5, '+%');
// (...) value mapping from domain to output -----------------------------------
outputNumber = powerScaleNumber(10);
outputString = powerScaleString(10);
outputString = powerScaleNumString(10);
// copy(...) -----------------------------------------------------------------
// copy() must preserve the <Range, Output> type parameters of the source scale
const copiedPowerScale: d3Scale.ScalePower<number, string> = powerScaleNumString.copy();
// NOTE(review): compile-time type assertions for d3-scale typings — exact
// expression forms are intentional; do not refactor.
// -------------------------------------------------------------------------------
// Logarithmic Scale Factory
// -------------------------------------------------------------------------------
// scaleLog() ---------------------------------------------------------------------
let logScaleNumber: d3Scale.ScaleLogarithmic<number, number>;
let logScaleString: d3Scale.ScaleLogarithmic<string, string>;
let logScaleNumString: d3Scale.ScaleLogarithmic<number, string>;
logScaleNumber = d3Scale.scaleLog();
logScaleString = d3Scale.scaleLog<string>();
logScaleNumString = d3Scale.scaleLog<number, string>();
// ScaleLogarithmic Interface ========================================================
// base --------------------------------------------------------------------
// base() getter returns number; base(x) setter is chainable
const base: number = logScaleNumber.base();
logScaleNumber = logScaleNumber.base(42);
// domain(...) -----------------------------------------------------------------
logScaleNumber = logScaleNumber.domain(domainNumeric);
logScaleNumber = logScaleNumber.domain(domainNumbers);
domainNumbers = logScaleNumber.domain();
logScaleString = logScaleString.domain(domainNumeric);
logScaleString = logScaleString.domain([10, 100]);
domainNumbers = logScaleString.domain();
logScaleNumString = logScaleNumString.domain(domainNumeric);
logScaleNumString = logScaleNumString.domain(domainNumbers);
domainNumbers = logScaleNumString.domain();
// range(...) -----------------------------------------------------------------
logScaleNumber = logScaleNumber.range(rangeNumbers);
rangeNumbers = logScaleNumber.range();
logScaleString = logScaleString.range(['steelblue', 'brown']);
rangeStrings = logScaleString.range();
logScaleNumString = logScaleNumString.range(rangeNumbers);
rangeNumbers = logScaleNumString.range();
// invert(...) -----------------------------------------------------------------
num = logScaleNumber.invert(500); // has number range, so inversion is possible
num = logScaleNumber.invert(new NumCoercible(500)); // has number range, so inversion is possible
num = logScaleNumString.invert(500); // has number range, so inversion is possible
num = logScaleNumString.invert(new NumCoercible(500)); // has number range, so inversion is possible
// rangeRound(...) -----------------------------------------------------------------
logScaleNumber = logScaleNumber.rangeRound(rangeNumbers);
// clamp(...) -----------------------------------------------------------------
logScaleNumber = logScaleNumber.clamp(true);
clampFlag = logScaleNumber.clamp();
// interpolate(...) -----------------------------------------------------------------
logScaleString = logScaleString.interpolate(interpolateCubehelix.gamma(3));
logScaleNumString = logScaleNumString.interpolate((a, b) => {
// take two numbers
return (t: number) => (a * (1 - t) + b * t) + 'px'; // a and b are numbers based on Range Type, return value of interpolator is string based on Output type
});
// nice(...) -----------------------------------------------------------------------
// chainable
logScaleNumber = logScaleNumber.nice();
// logScaleNumber = logScaleNumber.nice(5); // fails, logarithmic scale does not support count parameter.
// ticks(...) -----------------------------------------------------------------
ticksNumbers = logScaleNumber.ticks();
ticksNumbers = logScaleNumber.ticks(5);
// tickFormat(...) -----------------------------------------------------------------
tickFormatNumberFn = logScaleNumber.tickFormat();
tickFormatNumberFn = logScaleNumber.tickFormat(5);
tickFormatNumberFn = logScaleNumber.tickFormat(5, '+%');
// (...) value mapping from domain to output -----------------------------------
outputNumber = logScaleNumber(10);
outputString = logScaleString(10);
outputString = logScaleNumString(10);
// copy(...) -----------------------------------------------------------------
const copiedLogScale: d3Scale.ScaleLogarithmic<number, string> = logScaleNumString.copy();
// -------------------------------------------------------------------------------
// Identity Scale Factory
// -------------------------------------------------------------------------------
// scaleIdentity -----------------------------------------------------------------
// identity scales are number -> number only, hence no type parameters
let identityScale: d3Scale.ScaleIdentity;
identityScale = d3Scale.scaleIdentity();
// ScaleIdentity Interface ========================================================
// domain(...) -----------------------------------------------------------------
identityScale = identityScale.domain(domainNumeric);
identityScale = identityScale.domain(domainNumbers);
domainNumbers = identityScale.domain();
// range(...) -----------------------------------------------------------------
identityScale = identityScale.range(rangeNumbers);
rangeNumbers = identityScale.range();
// invert(...) -----------------------------------------------------------------
num = identityScale.invert(500); // has number range, so inversion is possible
num = identityScale.invert(new NumCoercible(500)); // has number range, so inversion is possible
// nice(...) -----------------------------------------------------------------------
// chainable
identityScale = identityScale.nice();
identityScale = identityScale.nice(5);
// ticks(...) -----------------------------------------------------------------
ticksNumbers = identityScale.ticks();
ticksNumbers = identityScale.ticks(5);
// tickFormat(...) -----------------------------------------------------------------
tickFormatNumberFn = identityScale.tickFormat();
tickFormatNumberFn = identityScale.tickFormat(5);
tickFormatNumberFn = identityScale.tickFormat(5, '+%');
// (...) value mapping from domain to output -----------------------------------
outputNumber = identityScale(10);
// copy(...) -----------------------------------------------------------------
const copiedIdentityScale: d3Scale.ScaleIdentity = identityScale.copy();
// NOTE(review): compile-time type assertions for d3-scale typings — exact
// expression forms are intentional; do not refactor.
// -------------------------------------------------------------------------------
// Time Scale Factories
// -------------------------------------------------------------------------------
// scaleTime() and scaleUtc() ----------------------------------------------------
// scaleTime/scaleUtc share the ScaleTime interface; only the tick/nice
// time-interval semantics differ at runtime (local vs UTC).
let localTimeScaleNumber: d3Scale.ScaleTime<number, number>;
let localTimeScaleString: d3Scale.ScaleTime<string, string>;
let localTimeScaleNumString: d3Scale.ScaleTime<number, string>;
localTimeScaleNumber = d3Scale.scaleTime();
localTimeScaleString = d3Scale.scaleTime<string>();
localTimeScaleNumString = d3Scale.scaleTime<number, string>();
let utcScaleNumber: d3Scale.ScaleTime<number, number>;
let utcScaleString: d3Scale.ScaleTime<string, string>;
let utcScaleNumString: d3Scale.ScaleTime<number, string>;
utcScaleNumber = d3Scale.scaleUtc();
utcScaleString = d3Scale.scaleUtc<string>();
utcScaleNumString = d3Scale.scaleUtc<number, string>();
// domain(...) -----------------------------------------------------------------
localTimeScaleNumber = localTimeScaleNumber.domain(domainDates);
domainDates = localTimeScaleNumber.domain();
// domain accepts Date as well as anything number-coercible (Date.now() returns number)
localTimeScaleString = localTimeScaleString.domain([new Date(2016, 6, 1), Date.now()]);
domainDates = localTimeScaleString.domain();
localTimeScaleNumString = localTimeScaleNumString.domain(domainDates);
domainDates = localTimeScaleNumString.domain();
// range(...) -----------------------------------------------------------------
localTimeScaleNumber = localTimeScaleNumber.range(rangeNumbers);
rangeNumbers = localTimeScaleNumber.range();
localTimeScaleString = localTimeScaleString.range(['steelblue', 'brown']);
rangeStrings = localTimeScaleString.range();
localTimeScaleNumString = localTimeScaleNumString.range(rangeNumbers);
rangeNumbers = localTimeScaleNumString.range();
// invert(...) -----------------------------------------------------------------
date = localTimeScaleNumber.invert(500); // has number range, so inversion is possible
date = localTimeScaleNumber.invert(new NumCoercible(500)); // has number range, so inversion is possible
date = localTimeScaleNumString.invert(500); // has number range, so inversion is possible
date = localTimeScaleNumString.invert(new NumCoercible(500)); // has number range, so inversion is possible
// rangeRound(...) -----------------------------------------------------------------
localTimeScaleNumber = localTimeScaleNumber.rangeRound(rangeNumbers);
// clamp(...) -----------------------------------------------------------------
localTimeScaleNumber = localTimeScaleNumber.clamp(true);
clampFlag = localTimeScaleNumber.clamp();
// interpolate(...) -----------------------------------------------------------------
localTimeScaleString = localTimeScaleString.interpolate(interpolateCubehelix.gamma(3));
localTimeScaleNumString = localTimeScaleNumString.interpolate((a, b) => {
// take two numbers
return (t: number) => (a * (1 - t) + b * t) + 'px'; // a and b are numbers based on Range Type, return value of interpolator is string based on Output type
});
// nice(...) -----------------------------------------------------------------------
// chainable
localTimeScaleNumber = localTimeScaleNumber.nice();
localTimeScaleNumber = localTimeScaleNumber.nice(5);
localTimeScaleNumber = localTimeScaleNumber.nice(timeHour);
localTimeScaleNumber = localTimeScaleNumber.nice(timeHour, 5);
// localTimeScaleNumber = localTimeScaleNumber.nice(timeHour.every(5)); // fails, requires CountableTimeInterval
// ticks(...) -----------------------------------------------------------------
// timeHour.every(...) may return null, hence the null guards below
const timeInterval = timeHour.every(5);
ticksDates = localTimeScaleNumber.ticks();
ticksDates = localTimeScaleNumber.ticks(50);
if (timeInterval !== null) {
ticksDates = localTimeScaleNumString.ticks(timeInterval);
}
// tickFormat(...) -----------------------------------------------------------------
tickFormatDateFn = localTimeScaleNumber.tickFormat();
tickFormatDateFn = localTimeScaleNumber.tickFormat(50, '%I %p');
if (timeInterval !== null) {
tickFormatDateFn = localTimeScaleNumber.tickFormat(timeInterval, '%I %p');
}
// (...) value mapping from domain to output -----------------------------------
outputNumber = localTimeScaleNumber(new Date(2016, 6, 4));
outputString = localTimeScaleString(new Date(2016, 6, 4));
outputString = localTimeScaleNumString(new Date(2016, 6, 4));
// copy(...) -----------------------------------------------------------------
const copiedTimeScale: d3Scale.ScaleTime<number, string> = localTimeScaleNumString.copy();
// -------------------------------------------------------------------------------
// Sequential Scale Factory
// -------------------------------------------------------------------------------
// scaleSequential() -----------------------------------------------------------------
let sequentialScaleColorString: d3Scale.ScaleSequential<string>;
sequentialScaleColorString = d3Scale.scaleSequential<string>(d3Scale.interpolateRainbow);
sequentialScaleColorString = d3Scale.scaleSequential(d3Scale.interpolateCool); // inferred Output type string
// ScaleSequential Interface ========================================================
// domain(...) -----------------------------------------------------------------
// sequential scales have a fixed two-element domain
sequentialScaleColorString = sequentialScaleColorString.domain([0, 1]);
sequentialScaleColorString = sequentialScaleColorString.domain([new NumCoercible(0), new NumCoercible(100)]);
const domainSequential: [number, number] = sequentialScaleColorString.domain();
// clamp(...) -----------------------------------------------------------------
sequentialScaleColorString = sequentialScaleColorString.clamp(true);
clampFlag = sequentialScaleColorString.clamp();
// interpolate(...) -----------------------------------------------------------------
sequentialScaleColorString = sequentialScaleColorString.interpolator(d3Scale.interpolateInferno);
let sequentialInterpolator: (t: number) => string;
sequentialInterpolator = sequentialScaleColorString.interpolator();
// (...) value mapping from domain to output -----------------------------------
outputString = sequentialScaleColorString(10);
// copy(...) -----------------------------------------------------------------
const copiedSequentialScale: d3Scale.ScaleSequential<string> = sequentialScaleColorString.copy();
// -------------------------------------------------------------------------------
// Color Interpolators for Sequential Scale Factory
// -------------------------------------------------------------------------------
// every built-in color interpolator must be assignable to (t: number) => string
let colorInterpolator: ((t: number) => string);
colorInterpolator = d3Scale.interpolateViridis;
colorInterpolator = d3Scale.interpolateMagma;
colorInterpolator = d3Scale.interpolateInferno;
colorInterpolator = d3Scale.interpolatePlasma;
colorInterpolator = d3Scale.interpolateRainbow;
colorInterpolator = d3Scale.interpolateWarm;
colorInterpolator = d3Scale.interpolateCool;
colorInterpolator = d3Scale.interpolateCubehelixDefault;
// NOTE(review): compile-time type assertions for d3-scale typings — exact
// expression forms are intentional; do not refactor.
// -------------------------------------------------------------------------------
// Quantize Scale Factory
// -------------------------------------------------------------------------------
// scaleQuantize() -----------------------------------------------------------------
let quantizeScaleNumber: d3Scale.ScaleQuantize<number>;
let quantizeScaleString: d3Scale.ScaleQuantize<string>;
quantizeScaleNumber = d3Scale.scaleQuantize();
quantizeScaleString = d3Scale.scaleQuantize<string>();
// ScaleQuantize Interface ========================================================
// domain(...) -----------------------------------------------------------------
// quantize scales have a fixed two-element continuous domain
quantizeScaleNumber = quantizeScaleNumber.domain([0, 1]);
quantizeScaleNumber = quantizeScaleNumber.domain([new NumCoercible(0), new NumCoercible(100)]);
const domainQuantize: [number, number] = quantizeScaleNumber.domain();
// range(...) -----------------------------------------------------------------
quantizeScaleNumber = quantizeScaleNumber.range(rangeNumbers);
rangeNumbers = quantizeScaleNumber.range();
quantizeScaleString = quantizeScaleString.range(['steelblue', 'brown']);
rangeStrings = quantizeScaleString.range();
// invertExtent(...) -----------------------------------------------------------------
numExtent = quantizeScaleNumber.invertExtent(500);
numExtent = quantizeScaleString.invertExtent('steelblue');
// nice(...) -----------------------------------------------------------------------
// chainable
quantizeScaleNumber = quantizeScaleNumber.nice();
quantizeScaleNumber = quantizeScaleNumber.nice(5);
// ticks(...) -----------------------------------------------------------------
ticksNumbers = quantizeScaleNumber.ticks();
ticksNumbers = quantizeScaleNumber.ticks(5);
// tickFormat(...) -----------------------------------------------------------------
tickFormatNumberFn = quantizeScaleNumber.tickFormat();
tickFormatNumberFn = quantizeScaleNumber.tickFormat(5);
tickFormatNumberFn = quantizeScaleNumber.tickFormat(5, '+%');
// (...) value mapping from domain to output -----------------------------------
outputNumber = quantizeScaleNumber(0.51);
// copy(...) -----------------------------------------------------------------
const copiedQuantizeScale: d3Scale.ScaleQuantize<number> = quantizeScaleNumber.copy();
// -------------------------------------------------------------------------------
// Quantile Scale Factory
// -------------------------------------------------------------------------------
// scaleQuantile() -----------------------------------------------------------------
let quantileScaleNumber: d3Scale.ScaleQuantile<number>;
let quantileScaleString: d3Scale.ScaleQuantile<string>;
quantileScaleNumber = d3Scale.scaleQuantile();
quantileScaleString = d3Scale.scaleQuantile<string>();
// ScaleQuantile Interface ========================================================
// domain(...) -----------------------------------------------------------------
// quantile domains are full sample populations, not two-element extents
quantileScaleNumber = quantileScaleNumber.domain(domainNumbers);
domainNumbers = quantileScaleNumber.domain();
quantileScaleString = quantileScaleString.domain(domainNumeric);
// range(...) -----------------------------------------------------------------
quantileScaleNumber = quantileScaleNumber.range([1, 2, 3, 4]);
rangeNumbers = quantileScaleNumber.range();
quantileScaleString = quantileScaleString.range(['q25', 'q50', 'q75']);
rangeStrings = quantileScaleString.range();
// invertExtent(...) -----------------------------------------------------------------
numExtent = quantileScaleNumber.invertExtent(2);
numExtent = quantileScaleString.invertExtent('q50');
// quantile() -----------------------------------------------------------------------
const quantiles: number[] = quantileScaleNumber.quantiles();
// (...) value mapping from domain to output -----------------------------------
outputNumber = quantileScaleNumber(0.51);
// copy(...) -----------------------------------------------------------------
const copiedQuantileScale: d3Scale.ScaleQuantile<number> = quantileScaleNumber.copy();
// -------------------------------------------------------------------------------
// Threshold Scale Factory
// -------------------------------------------------------------------------------
// scaleThreshold() -----------------------------------------------------------------
let thresholdScaleNumberNumber: d3Scale.ScaleThreshold<number, number>;
let thresholdScaleNumberString: d3Scale.ScaleThreshold<number, string>;
thresholdScaleNumberNumber = d3Scale.scaleThreshold();
thresholdScaleNumberString = d3Scale.scaleThreshold<number, string>();
// ScaleThreshold Interface ========================================================
// domain(...) -----------------------------------------------------------------
thresholdScaleNumberNumber = thresholdScaleNumberNumber.domain([0.5]);
domainNumbers = thresholdScaleNumberNumber.domain();
thresholdScaleNumberString = thresholdScaleNumberString.domain([0.2, 0.8]);
// range(...) -----------------------------------------------------------------
// a threshold range has one more element than its domain
thresholdScaleNumberNumber = thresholdScaleNumberNumber.range([100, 200]);
rangeNumbers = thresholdScaleNumberNumber.range();
thresholdScaleNumberString = thresholdScaleNumberString.range(['steelblue', 'seagreen', 'brown']);
rangeStrings = thresholdScaleNumberString.range();
// invertExtent(...) -----------------------------------------------------------------
// extent endpoints may be undefined for the outermost range slots
numOrUndefinedExtent = thresholdScaleNumberNumber.invertExtent(100);
numOrUndefinedExtent = thresholdScaleNumberString.invertExtent('seagreen');
// (...) value mapping from domain to output -----------------------------------
outputNumber = thresholdScaleNumberNumber(0.51);
outputString = thresholdScaleNumberString(0.9);
// copy(...) -----------------------------------------------------------------
const copiedThresholdScale: d3Scale.ScaleThreshold<number, string> = thresholdScaleNumberString.copy();
// NOTE(review): compile-time type assertions for d3-scale typings — exact
// expression forms are intentional; do not refactor.
// -------------------------------------------------------------------------------
// Ordinal Scale Factory
// -------------------------------------------------------------------------------
// scaleOrdinal() -----------------------------------------------------------------
let ordinalScaleStringString: d3Scale.ScaleOrdinal<string, string>;
let ordinalScaleStringNumber: d3Scale.ScaleOrdinal<string, number>;
ordinalScaleStringString = d3Scale.scaleOrdinal<string>();
ordinalScaleStringString = d3Scale.scaleOrdinal<string>(schemePuRd[3]);
ordinalScaleStringNumber = d3Scale.scaleOrdinal<string, number>();
ordinalScaleStringString = d3Scale.scaleOrdinal<string, string>(schemePuRd[3]);
// ScaleOrdinal Interface ========================================================
// domain(...) -----------------------------------------------------------------
ordinalScaleStringString = ordinalScaleStringString.domain(['negative', 'neutral', 'positive']);
domainStrings = ordinalScaleStringString.domain();
ordinalScaleStringNumber = ordinalScaleStringNumber.domain(['negative', 'neutral', 'positive']);
// range(...) -----------------------------------------------------------------
ordinalScaleStringString = ordinalScaleStringString.range(['crimson', 'midnightblue', 'seagreen']);
ordinalScaleStringString = ordinalScaleStringString.range(schemePuRd[3]);
rangeStrings = ordinalScaleStringString.range();
ordinalScaleStringNumber = ordinalScaleStringNumber.range([-1, 0, 1]);
rangeNumbers = ordinalScaleStringNumber.range();
// unknown(...) and d3Scale.scaleImplicit --------------------------------------
// scaleImplicit is a sentinel: unknown domain values get implicitly added
const implicit: { name: 'implicit' } = d3Scale.scaleImplicit;
ordinalScaleStringString = ordinalScaleStringString.unknown(d3Scale.scaleImplicit);
ordinalScaleStringNumber = ordinalScaleStringNumber.unknown(0);
// unknown() getter returns a union of the Range type and the implicit sentinel,
// so the result must be narrowed before use
const unknownValue: string | { name: 'implicit' } = ordinalScaleStringString.unknown();
if (typeof unknownValue === 'string') {
console.log(unknownValue);
} else {
console.log(unknownValue.name);
}
// (...) value mapping from domain to output -----------------------------------
outputString = ordinalScaleStringString('neutral');
outputNumber = ordinalScaleStringNumber('negative');
// copy(...) -----------------------------------------------------------------
const copiedOrdinalScale: d3Scale.ScaleOrdinal<string, number> = ordinalScaleStringNumber.copy();
// -------------------------------------------------------------------------------
// Band Scale Factory
// -------------------------------------------------------------------------------
// scaleBand() -----------------------------------------------------------------
let bandScaleString: d3Scale.ScaleBand<string>;
let bandScaleCoercible: d3Scale.ScaleBand<StringCoercible>;
bandScaleString = d3Scale.scaleBand();
bandScaleCoercible = d3Scale.scaleBand<StringCoercible>();
// ScaleBand Interface ========================================================
// domain(...) -----------------------------------------------------------------
bandScaleString = bandScaleString.domain(['negative', 'neutral', 'positive']);
domainStrings = bandScaleString.domain();
bandScaleCoercible = bandScaleCoercible.domain([new StringCoercible('negative'), new StringCoercible('neutral'), new StringCoercible('positive')]);
// range(...) -----------------------------------------------------------------
// band/point ranges are numeric two-element extents regardless of Domain type
bandScaleString = bandScaleString.range([0, 300]);
let rangeExtent: [number, number] = bandScaleString.range();
bandScaleCoercible = bandScaleCoercible.range([0, 300]);
rangeExtent = bandScaleCoercible.range();
// rangeRound(...) -----------------------------------------------------------------
bandScaleString = bandScaleString.rangeRound([0, 300]);
// round(...) -----------------------------------------------------------------
bandScaleCoercible = bandScaleCoercible.round(true);
let roundingFlag: boolean = bandScaleCoercible.round();
// paddingInner(...) -----------------------------------------------------------------
bandScaleString = bandScaleString.paddingInner(0.1);
num = bandScaleString.paddingInner();
// paddingOuter(...) -----------------------------------------------------------------
bandScaleString = bandScaleString.paddingOuter(0.1);
num = bandScaleString.paddingOuter();
// padding(...) -----------------------------------------------------------------
bandScaleString = bandScaleString.padding(0.1);
num = bandScaleString.padding();
// align(...) -----------------------------------------------------------------
bandScaleString = bandScaleString.align(0.5);
num = bandScaleString.align();
// bandwidth(...) -----------------------------------------------------------------
num = bandScaleString.bandwidth();
// step(...) -----------------------------------------------------------------
num = bandScaleString.step();
// (...) value mapping from domain to output -----------------------------------
// band/point scales return number | undefined (undefined for unknown inputs)
outputNumberMaybe = bandScaleString('neutral');
outputNumberMaybe = bandScaleCoercible(new StringCoercible('negative'));
// copy(...) -----------------------------------------------------------------
const copiedBandScale: d3Scale.ScaleBand<StringCoercible> = bandScaleCoercible.copy();
// -------------------------------------------------------------------------------
// Point Scale Factory
// -------------------------------------------------------------------------------
// scalePoint() -----------------------------------------------------------------
let pointScaleString: d3Scale.ScalePoint<string>;
let pointScaleCoercible: d3Scale.ScalePoint<StringCoercible>;
pointScaleString = d3Scale.scalePoint();
pointScaleCoercible = d3Scale.scalePoint<StringCoercible>();
// ScalePoint Interface ========================================================
// domain(...) -----------------------------------------------------------------
pointScaleString = pointScaleString.domain(['negative', 'neutral', 'positive']);
domainStrings = pointScaleString.domain();
pointScaleCoercible = pointScaleCoercible.domain([new StringCoercible('negative'), new StringCoercible('neutral'), new StringCoercible('positive')]);
// range(...) -----------------------------------------------------------------
pointScaleString = pointScaleString.range([0, 300]);
rangeExtent = pointScaleString.range();
pointScaleCoercible = pointScaleCoercible.range([0, 300]);
rangeExtent = pointScaleCoercible.range();
// rangeRound(...) -----------------------------------------------------------------
pointScaleString = pointScaleString.rangeRound([0, 300]);
// round(...) -----------------------------------------------------------------
pointScaleCoercible = pointScaleCoercible.round(true);
roundingFlag = pointScaleCoercible.round();
// padding(...) -----------------------------------------------------------------
pointScaleString = pointScaleString.padding(0.1);
num = pointScaleString.padding();
// align(...) -----------------------------------------------------------------
pointScaleString = pointScaleString.align(0.5);
num = pointScaleString.align();
// bandwidth(...) -----------------------------------------------------------------
// point scales always report zero bandwidth; the getter still type-checks
num = pointScaleString.bandwidth();
// step(...) -----------------------------------------------------------------
num = pointScaleString.step();
// (...) value mapping from domain to output -----------------------------------
outputNumberMaybe = pointScaleString('neutral');
outputNumberMaybe = pointScaleCoercible(new StringCoercible('negative'));
// copy(...) -----------------------------------------------------------------
const copiedPointScale: d3Scale.ScalePoint<StringCoercible> = pointScaleCoercible.copy();
// -------------------------------------------------------------------------------
// Categorical Color Schemas for Ordinal Scales
// -------------------------------------------------------------------------------
let colorStrings: string[];
colorStrings = d3Scale.schemeCategory10;
colorStrings = d3Scale.schemeCategory20;
colorStrings = d3Scale.schemeCategory20b;
colorStrings = d3Scale.schemeCategory20c; | the_stack |
import { ApiClient } from '../.'
import BigNumber from 'bignumber.js'
/**
 * Fee estimate mode passed through to the node's send RPCs
 * (values mirror the RPC's accepted strings).
 */
export enum Mode {
UNSET = 'UNSET',
ECONOMICAL = 'ECONOMICAL',
CONSERVATIVE = 'CONSERVATIVE'
}
/**
 * Address format used when generating a new address
 * (values mirror the RPC's accepted strings).
 */
export enum AddressType {
LEGACY = 'legacy',
P2SH_SEGWIT = 'p2sh-segwit',
BECH32 = 'bech32'
}
/**
 * Output script classifications as reported by the node
 * (e.g. in address/transaction inspection results).
 */
export enum ScriptType {
NONSTANDARD = 'nonstandard',
PUBKEY = 'pubkey',
PUBKEYHASH = 'pubkeyhash',
SCRIPTHASH = 'scripthash',
MULTISIG = 'multisig',
NULLDATA = 'nulldata',
WITNESS_V0_KEYHASH = 'witness_v0_keyhash',
WITNESS_UNKNOWN = 'witness_unknown',
}
/**
 * Wallet flags togglable via setWalletFlag.
 */
export enum WalletFlag {
AVOID_REUSE = 'avoid_reuse'
}
/**
 * Whether a transaction signals BIP-125 replace-by-fee,
 * as reported by the node's transaction listing RPCs.
 */
export enum BIP125 {
YES = 'yes',
NO = 'no',
UNKNOWN = 'unknown'
}
/**
 * Category of a transaction from the wallet's point of view,
 * as reported by the node's transaction listing RPCs.
 */
export enum InWalletTransactionCategory {
SEND = 'send',
RECEIVE = 'receive',
GENERATE = 'generate',
IMMATURE = 'immature',
ORPHAN = 'orphan'
}
/**
* Wallet RPCs for DeFi Blockchain
*/
export class Wallet {
// JSON-RPC transport used to dispatch every wallet call
private readonly client: ApiClient

/**
 * @param {ApiClient} client transport used to issue wallet RPC calls
 */
constructor (client: ApiClient) {
this.client = client
}
/**
 * Returns the total available balance in the wallet.
 *
 * @param {number} minimumConfirmation only count transactions confirmed at least this many times
 * @param {boolean} includeWatchOnly whether to include watch-only address balances
 * @return Promise<BigNumber>
 */
async getBalance (minimumConfirmation: number = 0, includeWatchOnly: boolean = false): Promise<BigNumber> {
  // '*' is the RPC's legacy dummy "account" argument selecting the whole wallet
  const params = ['*', minimumConfirmation, includeWatchOnly]
  return await this.client.call('getbalance', params, 'bignumber')
}
/**
* Identical to getBalance to get untrusted pending balance
*
* @return Promise<BigNumber>
*/
async getUnconfirmedBalance (): Promise<BigNumber> {
return await this.client.call('getunconfirmedbalance', [false], 'bignumber')
}
/**
* Returns an object with all balances.
*
* @return {Promise<WalletBalances>}
*/
async getBalances (): Promise<WalletBalances> {
return await this.client.call('getbalances', [false], 'bignumber')
}
/**
 * Get list of UTXOs in wallet.
 *
 * @param {number} minimumConfirmation default = 1, to filter
 * @param {number} maximumConfirmation default = 9999999, to filter
 * @param {ListUnspentOptions} [options]
 * @param {string[]} [options.addresses] to filter
 * @param {boolean} [options.includeUnsafe=true] default = true, include outputs that are not safe to spend
 * @param {ListUnspentQueryOptions} [options.queryOptions]
 * @param {number} [options.queryOptions.minimumAmount] default = 0, minimum value of each UTXO
 * @param {number} [options.queryOptions.maximumAmount] default is 'unlimited', maximum value of each UTXO
 * @param {number} [options.queryOptions.maximumCount] default is 'unlimited', maximum number of UTXOs
 * @param {number} [options.queryOptions.minimumSumAmount] default is 'unlimited', minimum sum value of all UTXOs
 * @param {string} [options.queryOptions.tokenId] default is 'all', filter by token
 * @return {Promise<UTXO[]>}
 */
async listUnspent (
  minimumConfirmation = 1,
  maximumConfirmation = 9999999,
  options: ListUnspentOptions = {}
): Promise<UTXO[]> {
  const {
    addresses = [],
    includeUnsafe = true,
    queryOptions = {}
  } = options
  // positional argument order is fixed by the RPC
  const params = [
    minimumConfirmation,
    maximumConfirmation,
    addresses,
    includeUnsafe,
    queryOptions
  ]
  // only the 'amount' field needs lossless BigNumber precision
  return await this.client.call('listunspent', params, { amount: 'bignumber' })
}
/**
 * Create a new wallet.
 *
 * @param {string} walletName
 * @param {boolean} disablePrivateKeys
 * @param {CreateWalletOptions} [options]
 * @param {boolean} [options.blank] create a blank wallet (no keys or HD seed)
 * @param {string} [options.passphrase] encrypt the wallet with this passphrase
 * @param {boolean} [options.avoidReuse] enable address-reuse avoidance
 * @return {Promise<CreateWalletResult>}
 */
async createWallet (
  walletName: string,
  disablePrivateKeys = false,
  options: CreateWalletOptions = {}
): Promise<CreateWalletResult> {
  const {
    blank = false,
    passphrase = '',
    avoidReuse = false
  } = options
  // positional argument order is fixed by the RPC
  const params = [walletName, disablePrivateKeys, blank, passphrase, avoidReuse]
  return await this.client.call('createwallet', params, 'number')
}
/**
 * Return an object containing various wallet state info.
 * Monetary fields are mapped to BigNumber to avoid floating-point precision loss.
 *
 * @return {Promise<WalletInfo>}
 */
async getWalletInfo (): Promise<WalletInfo> {
  return await this.client.call('getwalletinfo', [], {
    balance: 'bignumber',
    immature_balance: 'bignumber',
    paytxfee: 'bignumber',
    unconfirmed_balance: 'bignumber'
  })
}
/**
 * Change the state of the given wallet flag for a wallet.
 *
 * @param {WalletFlag} flag to change, e.g. avoid_reuse
 * @param {boolean} value optional, default = true (enable the flag)
 * @return {Promise<WalletFlagResult>}
 */
async setWalletFlag (flag: WalletFlag, value: boolean = true): Promise<WalletFlagResult> {
  const params = [flag, value]
  return await this.client.call('setwalletflag', params, 'number')
}
/**
 * Returns a new DeFi address for receiving payments.
 * If 'label' is specified it is added to the address book, so payments
 * received with the address are associated with that label.
 *
 * @param {string} label for the address to be linked to; may be an empty string
 * @param {AddressType} addressType to use, e.g. legacy, p2sh-segwit, bech32
 * @return {Promise<string>}
 */
async getNewAddress (label: string = '', addressType = AddressType.BECH32): Promise<string> {
  const address: string = await this.client.call('getnewaddress', [label, addressType], 'number')
  return address
}
/**
 * Validate and return information about the given DFI address
 *
 * @param {string} address address to validate
 * @return {Promise<ValidateAddressResult>} validity flags and script details
 */
async validateAddress (address: string): Promise<ValidateAddressResult> {
return await this.client.call('validateaddress', [address], 'number')
}
/**
 * Return information about the given address
 *
 * @param {string} address address owned by or watched by the wallet
 * @return {Promise<AddressInfo>} ownership, script and HD-key details
 */
async getAddressInfo (address: string): Promise<AddressInfo> {
return await this.client.call('getaddressinfo', [address], 'number')
}
/**
 * Send an amount to given address and return a transaction id
 *
 * @param {string} address
 * @param {number} amount
 * @param {SendToAddressOptions} [options]
 * @param {string} [options.comment]
 * @param {string} [options.commentTo]
 * @param {boolean} [options.subtractFeeFromAmount]
 * @param {boolean} [options.replaceable]
 * @param {number} [options.confTarget]
 * @param {Mode} [options.estimateMode]
 * @param {boolean} [options.avoidReuse]
 * @return {Promise<string>} txid of the sent transaction
 */
async sendToAddress (
address: string,
amount: number,
options: SendToAddressOptions = {}
): Promise<string> {
// NOTE(review): the defaults below (confTarget = 6, Mode.UNSET) are assumed to
// mirror the node's own sendtoaddress defaults — confirm against node docs.
const {
comment = '',
commentTo = '',
subtractFeeFromAmount = false,
replaceable = false,
confTarget = 6,
estimateMode = Mode.UNSET,
avoidReuse = false
} = options
// Positional argument order is fixed by the RPC interface.
return await this.client.call(
'sendtoaddress',
[
address, amount, comment, commentTo, subtractFeeFromAmount,
replaceable, confTarget, estimateMode, avoidReuse
],
'bignumber'
)
}
/**
 * Lists groups of addresses which have had their common ownership made public
 * by common use as inputs or as the resulting change in past transactions
 *
 * @return {Promise<any[][][]>} nested groups; amounts mapped to BigNumber
 */
async listAddressGroupings (): Promise<any[][][]> {
return await this.client.call('listaddressgroupings', [], 'bignumber')
}
/**
 * Send given amounts to multiple given address and return a transaction id.
 *
 * @param {Record<string, number>} amounts Dictionary/map with individual addresses and amounts
 * @param {string[]} subtractfeefrom Array of addresses from which fee needs to be deducted.
 * @param {SendManyOptions} options
 * @param {string} [options.comment] A comment
 * @param {boolean} [options.replaceable] Allow this transaction to be replaced by a transaction with higher fees via BIP 125
 * @param {number} [options.confTarget] Confirmation target (in blocks)
 * @param {Mode} [options.estimateMode] The fee estimate mode, must be one of (Mode.UNSET, Mode.ECONOMICAL, Mode.CONSERVATIVE)
 * @return {Promise<string>} hex string of the transaction
 */
async sendMany (
amounts: Record<string, number>,
subtractfeefrom: string [] = [],
options: SendManyOptions = {}): Promise<string> {
const {
comment = '',
replaceable = false,
confTarget = 6,
estimateMode = Mode.UNSET
} = options
// The RPC keeps two legacy positional slots that must still be supplied.
const dummy: string = '' // Must be set to '' for backward compatibility.
const minconf: number = 0 // Ignored dummy value
return await this.client.call(
'sendmany',
[
dummy, amounts, minconf, comment, subtractfeefrom,
replaceable, confTarget, estimateMode
],
'bignumber'
)
}
/**
 * Reveals the private key corresponding to an address.
 *
 * @param {string} address The DFI address for the private key.
 * @return {Promise<string>} the private key in wallet export format
 */
async dumpPrivKey (address: string): Promise<string> {
return await this.client.call('dumpprivkey', [address], 'number')
}
/**
 * Adds a private key (as returned by dumpprivkey) to your wallet. Requires a new wallet backup.
 *
 * @param {string} privkey The private key (see dumpprivkey)
 * @param {string} [label=""] current label if address exists, otherwise "".
 * @param {boolean} [rescan=true] Rescan the wallet for transactions
 * @return {Promise<void>} resolves once the key has been imported
 */
async importPrivKey (privkey: string, label: string = '', rescan: boolean = true): Promise<void> {
return await this.client.call('importprivkey', [privkey, label, rescan], 'number')
}
/**
 * Get detailed information about in-wallet transaction
 *
 * @param {string} txid transaction id
 * @param {boolean} includeWatchOnly optional, default = true
 * @return {Promise<InWalletTransaction>} transaction with amount as BigNumber
 */
async getTransaction (txid: string, includeWatchOnly: boolean = true): Promise<InWalletTransaction> {
return await this.client.call('gettransaction', [txid, includeWatchOnly], { amount: 'bignumber' })
}
}
/** An unspent transaction output as reported by the `listunspent` RPC. */
export interface UTXO {
txid: string
vout: number
address: string
label: string
scriptPubKey: string
amount: BigNumber
tokenId: number
confirmations: number
redeemScript: number
witnessScript: number
spendable: boolean
solvable: boolean
reused: string
desc: string
safe: boolean
}
/** Optional filters for `listUnspent`. */
export interface ListUnspentOptions {
addresses?: string[]
includeUnsafe?: boolean
queryOptions?: ListUnspentQueryOptions
}
/** Amount/count constraints forwarded as `listunspent` query options. */
export interface ListUnspentQueryOptions {
minimumAmount?: number
maximumAmount?: number
maximumCount?: number
minimumSumAmount?: number
tokenId?: string
}
/** Optional arguments for `createWallet`. */
export interface CreateWalletOptions {
blank?: boolean
passphrase?: string
avoidReuse?: boolean
}
/** Optional arguments for `sendToAddress`. */
export interface SendToAddressOptions {
comment?: string
commentTo?: string
subtractFeeFromAmount?: boolean
replaceable?: boolean
confTarget?: number
estimateMode?: Mode
avoidReuse?: boolean
}
/** Optional arguments for `sendMany`. */
export interface SendManyOptions {
comment?: string
replaceable?: boolean
confTarget?: number
estimateMode?: Mode
}
/** Result of `createWallet`; `warning` is empty on full success. */
export interface CreateWalletResult {
name: string
warning: string
}
/** Wallet state as returned by `getWalletInfo`; monetary fields are BigNumber. */
export interface WalletInfo {
walletname: string
walletversion: number
balance: BigNumber
unconfirmed_balance: BigNumber
immature_balance: BigNumber
txcount: number
keypoololdest: number
keypoolsize: number
keypoolsize_hd_internal: number
unlocked_until: number
paytxfee: BigNumber
hdseedid: string
private_keys_enabled: boolean
avoid_reuse: boolean
scanning: {
duration: number
progress: number
}
}
/** Result of `validateAddress`. */
export interface ValidateAddressResult {
isvalid: boolean
address: string
scriptPubKey: string
isscript: boolean
iswitness: boolean
witness_version: number
witness_program: string
}
/** Result of `getAddressInfo`: ownership, script and HD-key details for an address. */
export interface AddressInfo {
address: string
scriptPubKey: string
ismine: boolean
iswatchonly: boolean
solvable: boolean
desc: string
isscript: boolean
// Fixed: was the literal type `true`, which made non-change addresses
// unrepresentable; the underlying RPC field is a plain boolean.
ischange: boolean
iswitness: boolean
witness_version: number
witness_program: string
script: ScriptType
hex: string
pubkeys: string[]
sigsrequired: number
pubkey: string
// Populated for P2SH/P2WSH wrappers: describes the embedded script/address.
embedded: {
address: string
scriptPubKey: string
isscript: boolean
iswitness: boolean
witness_version: number
witness_program: string
script: ScriptType
hex: string
sigsrequired: number
pubkey: string
pubkeys: string[]
}
iscompressed: boolean
label: string
timestamp: number
hdkeypath: string
hdseedid: string
hdmasterfingerprint: string
labels: Label[]
}
/** An address-book label entry. */
export interface Label {
name: string
purpose: string
}
/** Result of `setWalletFlag`. */
export interface WalletFlagResult {
flag_name: string
flag_state: boolean
warnings: string
}
/** A wallet transaction as returned by `getTransaction`; `amount` is BigNumber. */
export interface InWalletTransaction {
amount: BigNumber
fee: number
confirmations: number
blockhash: string
blockindex: number
blocktime: number
txid: string
time: number
timereceived: number
bip125replaceable?: BIP125
details: InWalletTransactionDetail[]
hex: string
}
/** One send/receive leg of an in-wallet transaction. */
export interface InWalletTransactionDetail {
address: string
category: InWalletTransactionCategory
amount: number
label: string
vout: number
fee: number
abandoned: boolean
}
export interface WalletBalances {
mine: WalletMineBalances
watchonly?: WalletWatchOnlyBalances
}
export interface WalletMineBalances {
trusted: BigNumber
untrusted_pending: BigNumber
immature: BigNumber
used?: BigNumber
}
export interface WalletWatchOnlyBalances {
trusted: BigNumber
untrusted_pending: BigNumber
immature: BigNumber
} | the_stack |
import * as assert from 'assert';
import { createRegex } from '../utils/glob';
suite('Glob', () => {
// A single '*' segment matches exactly one path segment; '**' may span any
// number of segments (including none). Inputs here are file:// URIs.
test('Glob matching, where fileMatch is a wildcard pattern, contains no double-star, and denotes filename only', async function () {
const pattern = '**/*.foo.json';
const positives = ['file:///folder/a.foo.json'];
const negatives = ['file:///folder/a.bar.json'];
for (const positive of positives) {
assertGlobMatch(pattern, positive);
}
for (const negative of negatives) {
assertNoGlobMatch(pattern, negative);
}
});
test('Glob matching, where fileMatch is a wildcard pattern, contains no double-star, and denotes a path', async function () {
const pattern = '**/foo/*/bar.json';
const positives = ['file:///folder/foo/bat/bar.json'];
const negatives = ['file:///folder/a.bar.json', 'file:///folder/foo/bar.json', 'file:///folder/foo/can/be/as/deep/as/the/ocean/floor/bar.json', 'file:///folder/foo/bar.json/bat/bar.json', 'file:///folder/foo.bar.json', 'file:///folder/foo.bat/bar.json', 'file:///folder/foo/bar.json/bat.json', 'file:///folder/.foo/bar.json', 'file:///folder/.foo/bat/bar.json', 'file:///folder/.foo/bat/man/bar.json'];
for (const positive of positives) {
assertGlobMatch(pattern, positive);
}
for (const negative of negatives) {
assertNoGlobMatch(pattern, negative);
}
});
test('Glob matching, where fileMatch is a wildcard pattern, contains double-star, and denotes a path', async function () {
const pattern = '**/foo/**/bar.json';
const positives = ['file:///folder/foo/bar.json', 'file:///folder/foo/bat/bar.json', 'file:///folder/foo/can/be/as/deep/as/the/ocean/floor/bar.json', 'file:///folder/foo/bar.json/bat/bar.json'];
const negatives = ['file:///folder/a.bar.json', 'file:///folder/foo.bar.json', 'file:///folder/foo.bat/bar.json', 'file:///folder/foo/bar.json/bat.json', 'file:///folder/.foo/bar.json', 'file:///folder/.foo/bat/bar.json', 'file:///folder/.foo/bat/man/bar.json', 'file:///folder/foo?foo/bat/man/bar.json'];
for (const positive of positives) {
assertGlobMatch(pattern, positive);
}
for (const negative of negatives) {
assertNoGlobMatch(pattern, negative);
}
});
// Compiles `pattern` and asserts that `regex.test(input) === expected`.
// The try block is restricted to createRegex so that a failed expectation
// below is no longer swallowed by the catch and re-reported WITHOUT the
// compiled regex source (the original wrapped both, losing the detail).
function assertMatch(pattern: string, input: string, expected: boolean) {
    let regex: RegExp;
    try {
        regex = createRegex(pattern, { extended: true, globstar: true });
    } catch (e) {
        // Pattern failed to compile at all.
        assert(false, `pattern: ${pattern}, input: ${input}, should match ${expected}`);
        return;
    }
    if (regex.test(input) !== expected) {
        assert(false, `pattern: ${pattern}, regex: ${regex.source}, input: ${input}, should match ${expected}`);
    }
}
// Convenience wrapper: `input` must match `pattern`.
function assertGlobMatch(pattern: string, input: string) {
    assertMatch(pattern, input, true);
}
// Convenience wrapper: `input` must NOT match `pattern`.
function assertNoGlobMatch(pattern: string, input: string) {
    assertMatch(pattern, input, false);
}
// Literal patterns (no wildcards) must match the whole path exactly.
test('simple', () => {
let p = 'node_modules';
assertGlobMatch(p, 'node_modules');
assertNoGlobMatch(p, 'node_module');
assertNoGlobMatch(p, '/node_modules');
assertNoGlobMatch(p, 'test/node_modules');
p = 'test.txt';
assertGlobMatch(p, 'test.txt');
assertNoGlobMatch(p, 'test?txt');
assertNoGlobMatch(p, '/text.txt');
assertNoGlobMatch(p, 'test/test.txt');
p = 'test(.txt';
assertGlobMatch(p, 'test(.txt');
assertNoGlobMatch(p, 'test?txt');
p = 'qunit';
assertGlobMatch(p, 'qunit');
assertNoGlobMatch(p, 'qunit.css');
assertNoGlobMatch(p, 'test/qunit');
// Absolute
p = '/DNXConsoleApp/**/*.cs';
assertGlobMatch(p, '/DNXConsoleApp/Program.cs');
assertGlobMatch(p, '/DNXConsoleApp/foo/Program.cs');
p = '*';
assertGlobMatch(p, '');
});
// Dotfile patterns: '.*' / '._*' only match at the top level unless
// prefixed with '**/'.
test('dot hidden', function () {
let p = '.*';
assertGlobMatch(p, '.git');
assertGlobMatch(p, '.hidden.txt');
assertNoGlobMatch(p, 'git');
assertNoGlobMatch(p, 'hidden.txt');
assertNoGlobMatch(p, 'path/.git');
assertNoGlobMatch(p, 'path/.hidden.txt');
p = '**/.*';
assertGlobMatch(p, '.git');
assertGlobMatch(p, '.hidden.txt');
assertNoGlobMatch(p, 'git');
assertNoGlobMatch(p, 'hidden.txt');
assertGlobMatch(p, 'path/.git');
assertGlobMatch(p, 'path/.hidden.txt');
assertNoGlobMatch(p, 'path/git');
assertNoGlobMatch(p, 'pat.h/hidden.txt');
p = '._*';
assertGlobMatch(p, '._git');
assertGlobMatch(p, '._hidden.txt');
assertNoGlobMatch(p, 'git');
assertNoGlobMatch(p, 'hidden.txt');
assertNoGlobMatch(p, 'path/._git');
assertNoGlobMatch(p, 'path/._hidden.txt');
p = '**/._*';
assertGlobMatch(p, '._git');
assertGlobMatch(p, '._hidden.txt');
assertNoGlobMatch(p, 'git');
assertNoGlobMatch(p, 'hidden._txt');
assertGlobMatch(p, 'path/._git');
assertGlobMatch(p, 'path/._hidden.txt');
assertNoGlobMatch(p, 'path/git');
assertNoGlobMatch(p, 'pat.h/hidden._txt');
});
// '*.ext'-style patterns match a filename in the current segment only —
// they never cross '/' boundaries.
test('file pattern', function () {
let p = '*.js';
assertGlobMatch(p, 'foo.js');
assertNoGlobMatch(p, 'folder/foo.js');
assertNoGlobMatch(p, '/node_modules/foo.js');
assertNoGlobMatch(p, 'foo.jss');
assertNoGlobMatch(p, 'some.js/test');
p = 'html.*';
assertGlobMatch(p, 'html.js');
assertGlobMatch(p, 'html.txt');
assertNoGlobMatch(p, 'htm.txt');
p = '*.*';
assertGlobMatch(p, 'html.js');
assertGlobMatch(p, 'html.txt');
assertGlobMatch(p, 'htm.txt');
assertNoGlobMatch(p, 'folder/foo.js');
assertNoGlobMatch(p, '/node_modules/foo.js');
p = 'node_modules/test/*.js';
assertGlobMatch(p, 'node_modules/test/foo.js');
assertNoGlobMatch(p, 'folder/foo.js');
assertNoGlobMatch(p, '/node_module/test/foo.js');
assertNoGlobMatch(p, 'foo.jss');
assertNoGlobMatch(p, 'some.js/test');
});
// A bare '*' matches any characters within one segment, but not '/'.
test('star', () => {
let p = 'node*modules';
assertGlobMatch(p, 'node_modules');
assertGlobMatch(p, 'node_super_modules');
assertNoGlobMatch(p, 'node_module');
assertNoGlobMatch(p, '/node_modules');
assertNoGlobMatch(p, 'test/node_modules');
p = '*';
assertGlobMatch(p, 'html.js');
assertGlobMatch(p, 'html.txt');
assertGlobMatch(p, 'htm.txt');
assertNoGlobMatch(p, 'folder/foo.js');
assertNoGlobMatch(p, '/node_modules/foo.js');
});
// '**/name/**' matches the bare name, the name as a directory, and any
// nesting on either side.
test('file / folder match', function () {
let p = '**/node_modules/**';
assertGlobMatch(p, 'node_modules');
assertGlobMatch(p, 'node_modules/');
assertGlobMatch(p, 'a/node_modules');
assertGlobMatch(p, 'a/node_modules/');
assertGlobMatch(p, 'node_modules/foo');
assertGlobMatch(p, 'foo/node_modules/foo/bar');
});
// '?' matches exactly one character (never '/').
test('questionmark', () => {
let p = 'node?modules';
assertGlobMatch(p, 'node_modules');
assertNoGlobMatch(p, 'node_super_modules');
assertNoGlobMatch(p, 'node_module');
assertNoGlobMatch(p, '/node_modules');
assertNoGlobMatch(p, 'test/node_modules');
p = '?';
assertGlobMatch(p, 'h');
assertNoGlobMatch(p, 'html.txt');
assertNoGlobMatch(p, 'htm.txt');
assertNoGlobMatch(p, 'folder/foo.js');
assertNoGlobMatch(p, '/node_modules/foo.js');
});
// '**' behaviour: spans zero or more whole path segments, composes with
// single-star segments, and works inside brace groups.
test('globstar', () => {
let p = '**/*.js';
assertGlobMatch(p, 'foo.js');
assertGlobMatch(p, 'folder/foo.js');
assertGlobMatch(p, '/node_modules/foo.js');
assertNoGlobMatch(p, 'foo.jss');
assertNoGlobMatch(p, 'some.js/test');
assertNoGlobMatch(p, '/some.js/test');
assertNoGlobMatch(p, '/some.js/test');
p = '**/project.json';
assertGlobMatch(p, 'project.json');
assertGlobMatch(p, '/project.json');
assertGlobMatch(p, 'some/folder/project.json');
assertNoGlobMatch(p, 'some/folder/file_project.json');
assertNoGlobMatch(p, 'some/folder/fileproject.json');
assertNoGlobMatch(p, 'some/rrproject.json');
// Trailing '**' also matches the directory itself.
p = 'test/**';
assertGlobMatch(p, 'test');
assertGlobMatch(p, 'test/foo.js');
assertGlobMatch(p, 'test/other/foo.js');
assertNoGlobMatch(p, 'est/other/foo.js');
p = '**';
assertGlobMatch(p, 'foo.js');
assertGlobMatch(p, 'folder/foo.js');
assertGlobMatch(p, '/node_modules/foo.js');
assertGlobMatch(p, 'foo.jss');
assertGlobMatch(p, 'some.js/test');
p = 'test/**/*.js';
assertGlobMatch(p, 'test/foo.js');
assertGlobMatch(p, 'test/other/foo.js');
assertGlobMatch(p, 'test/other/more/foo.js');
assertNoGlobMatch(p, 'test/foo.ts');
assertNoGlobMatch(p, 'test/other/foo.ts');
assertNoGlobMatch(p, 'test/other/more/foo.ts');
p = '**/**/*.js';
assertGlobMatch(p, 'foo.js');
assertGlobMatch(p, 'folder/foo.js');
assertGlobMatch(p, '/node_modules/foo.js');
assertNoGlobMatch(p, 'foo.jss');
assertNoGlobMatch(p, 'some.js/test');
p = '**/node_modules/**/*.js';
assertNoGlobMatch(p, 'foo.js');
assertNoGlobMatch(p, 'folder/foo.js');
assertGlobMatch(p, 'node_modules/foo.js');
assertGlobMatch(p, 'node_modules/some/folder/foo.js');
assertNoGlobMatch(p, 'node_modules/some/folder/foo.ts');
assertNoGlobMatch(p, 'foo.jss');
assertNoGlobMatch(p, 'some.js/test');
// Globstars inside a brace group; also exercises drive-letter prefixes.
p = '{**/node_modules/**,**/.git/**,**/bower_components/**}';
assertGlobMatch(p, 'node_modules');
assertGlobMatch(p, '/node_modules');
assertGlobMatch(p, '/node_modules/more');
assertGlobMatch(p, 'some/test/node_modules');
assertGlobMatch(p, 'some/test/node_modules');
assertGlobMatch(p, 'C://some/test/node_modules');
assertGlobMatch(p, 'C://some/test/node_modules/more');
assertGlobMatch(p, 'bower_components');
assertGlobMatch(p, 'bower_components/more');
assertGlobMatch(p, '/bower_components');
assertGlobMatch(p, 'some/test/bower_components');
assertGlobMatch(p, 'some/test/bower_components');
assertGlobMatch(p, 'C://some/test/bower_components');
assertGlobMatch(p, 'C://some/test/bower_components/more');
assertGlobMatch(p, '.git');
assertGlobMatch(p, '/.git');
assertGlobMatch(p, 'some/test/.git');
assertGlobMatch(p, 'some/test/.git');
assertGlobMatch(p, 'C://some/test/.git');
assertNoGlobMatch(p, 'tempting');
assertNoGlobMatch(p, '/tempting');
assertNoGlobMatch(p, 'some/test/tempting');
assertNoGlobMatch(p, 'some/test/tempting');
assertNoGlobMatch(p, 'C://some/test/tempting');
p = '{**/package.json,**/project.json}';
assertGlobMatch(p, 'package.json');
assertGlobMatch(p, '/package.json');
assertNoGlobMatch(p, 'xpackage.json');
assertNoGlobMatch(p, '/xpackage.json');
});
// Regression test: a literal prefix before '/**/' must match whole
// segments only ('some' must not match 'something').
test('issue 41724', function () {
let p = 'some/**/*.js';
assertGlobMatch(p, 'some/foo.js');
assertGlobMatch(p, 'some/folder/foo.js');
assertNoGlobMatch(p, 'something/foo.js');
assertNoGlobMatch(p, 'something/folder/foo.js');
p = 'some/**/*';
assertGlobMatch(p, 'some/foo.js');
assertGlobMatch(p, 'some/folder/foo.js');
assertNoGlobMatch(p, 'something/foo.js');
assertNoGlobMatch(p, 'something/folder/foo.js');
});
// Brace groups '{a,b}' expand to alternatives; they may contain globstars
// and character classes. (Also removes a stray dataset delimiter that had
// been fused onto the suite-closing line.)
test('brace expansion', function () {
let p = '*.{html,js}';
assertGlobMatch(p, 'foo.js');
assertGlobMatch(p, 'foo.html');
assertNoGlobMatch(p, 'folder/foo.js');
assertNoGlobMatch(p, '/node_modules/foo.js');
assertNoGlobMatch(p, 'foo.jss');
assertNoGlobMatch(p, 'some.js/test');
p = '*.{html}';
assertGlobMatch(p, 'foo.html');
assertNoGlobMatch(p, 'foo.js');
assertNoGlobMatch(p, 'folder/foo.js');
assertNoGlobMatch(p, '/node_modules/foo.js');
assertNoGlobMatch(p, 'foo.jss');
assertNoGlobMatch(p, 'some.js/test');
p = '{node_modules,testing}';
assertGlobMatch(p, 'node_modules');
assertGlobMatch(p, 'testing');
assertNoGlobMatch(p, 'node_module');
assertNoGlobMatch(p, 'dtesting');
p = '**/{foo,bar}';
assertGlobMatch(p, 'foo');
assertGlobMatch(p, 'bar');
assertGlobMatch(p, 'test/foo');
assertGlobMatch(p, 'test/bar');
assertGlobMatch(p, 'other/more/foo');
assertGlobMatch(p, 'other/more/bar');
p = '{foo,bar}/**';
assertGlobMatch(p, 'foo');
assertGlobMatch(p, 'bar');
assertGlobMatch(p, 'foo/test');
assertGlobMatch(p, 'bar/test');
assertGlobMatch(p, 'foo/other/more');
assertGlobMatch(p, 'bar/other/more');
p = '{**/*.d.ts,**/*.js}';
assertGlobMatch(p, 'foo.js');
assertGlobMatch(p, 'testing/foo.js');
assertGlobMatch(p, 'testing/foo.js');
assertGlobMatch(p, '/testing/foo.js');
assertGlobMatch(p, '/testing/foo.js');
assertGlobMatch(p, 'C:/testing/foo.js');
assertGlobMatch(p, 'foo.d.ts');
assertGlobMatch(p, 'testing/foo.d.ts');
assertGlobMatch(p, 'testing/foo.d.ts');
assertGlobMatch(p, '/testing/foo.d.ts');
assertGlobMatch(p, '/testing/foo.d.ts');
assertGlobMatch(p, 'C:/testing/foo.d.ts');
assertNoGlobMatch(p, 'foo.d');
assertNoGlobMatch(p, 'testing/foo.d');
assertNoGlobMatch(p, 'testing/foo.d');
assertNoGlobMatch(p, '/testing/foo.d');
assertNoGlobMatch(p, '/testing/foo.d');
assertNoGlobMatch(p, 'C:/testing/foo.d');
p = '{**/*.d.ts,**/*.js,path/simple.jgs}';
assertGlobMatch(p, 'foo.js');
assertGlobMatch(p, 'testing/foo.js');
assertGlobMatch(p, 'testing/foo.js');
assertGlobMatch(p, '/testing/foo.js');
assertGlobMatch(p, 'path/simple.jgs');
assertNoGlobMatch(p, '/path/simple.jgs');
assertGlobMatch(p, '/testing/foo.js');
assertGlobMatch(p, 'C:/testing/foo.js');
// Character classes inside a brace alternative.
p = '{**/*.d.ts,**/*.js,foo.[0-9]}';
assertGlobMatch(p, 'foo.5');
assertGlobMatch(p, 'foo.8');
assertNoGlobMatch(p, 'bar.5');
assertNoGlobMatch(p, 'foo.f');
assertGlobMatch(p, 'foo.js');
p = 'prefix/{**/*.d.ts,**/*.js,foo.[0-9]}';
assertGlobMatch(p, 'prefix/foo.5');
assertGlobMatch(p, 'prefix/foo.8');
assertNoGlobMatch(p, 'prefix/bar.5');
assertNoGlobMatch(p, 'prefix/foo.f');
assertGlobMatch(p, 'prefix/foo.js');
});
});
import {LanguageService} from '../../src/language_service';
import {APP_COMPONENT, MockService, setup} from './mock_host';
import {humanizeDefinitionInfo} from './test_utils';
describe('definitions', () => {
let service: MockService;
let ngLS: LanguageService;
// The language service and project are expensive to build, so they are
// created once; per-test isolation comes from service.reset() below.
beforeAll(() => {
const {project, service: _service, tsLS} = setup();
service = _service;
ngLS = new LanguageService(project, tsLS, {});
});
beforeEach(() => {
// Discard template overwrites made by the previous test.
service.reset();
});
describe('elements', () => {
// '¦' in templateOverride marks the cursor position for the request.
it('should work for native elements', () => {
const defs = getDefinitionsAndAssertBoundSpan({
templateOverride: `<butt¦on></button>`,
expectedSpanText: `<button></button>`,
});
expect(defs.length).toEqual(2);
expect(defs[0].fileName).toContain('lib.dom.d.ts');
expect(defs[0].contextSpan).toContain('interface HTMLButtonElement extends HTMLElement');
expect(defs[1].contextSpan).toContain('declare var HTMLButtonElement');
});
});
describe('templates', () => {
// <ng-template> has no definition target, so the service returns undefined.
it('should return no definitions for ng-templates', () => {
const {position} =
service.overwriteInlineTemplate(APP_COMPONENT, `<ng-templ¦ate></ng-template>`);
const definitionAndBoundSpan = ngLS.getDefinitionAndBoundSpan(APP_COMPONENT, position);
expect(definitionAndBoundSpan).toBeUndefined();
});
});
describe('directives', () => {
// An attribute-selector directive should resolve to its class declaration.
it('should work for directives', () => {
const defs = getDefinitionsAndAssertBoundSpan({
templateOverride: `<div string-model¦></div>`,
expectedSpanText: 'string-model',
});
expect(defs.length).toEqual(1);
expect(defs[0].contextSpan).toContain('@Directive');
expect(defs[0].contextSpan).toContain('export class StringModel');
});
// The bound span for a component is the entire element, including children.
// (Removed an accidentally duplicated length assertion.)
it('should work for components', () => {
const templateOverride = `
<t¦est-comp>
<div>some stuff in the middle</div>
</test-comp>`;
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride,
expectedSpanText: templateOverride.replace('¦', '').trim(),
});
expect(definitions.length).toEqual(1);
expect(definitions[0].textSpan).toEqual('TestComponent');
expect(definitions[0].contextSpan).toContain('@Component');
});
// Structural directive syntax ('*ngFor') resolves to the directive class.
it('should work for structural directives', () => {
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride: `<div *¦ngFor="let item of heroes"></div>`,
expectedSpanText: 'ngFor',
});
expect(definitions.length).toEqual(1);
expect(definitions[0].fileName).toContain('ng_for_of.d.ts');
expect(definitions[0].textSpan).toEqual('NgForOf');
expect(definitions[0].contextSpan)
.toContain(
'export declare class NgForOf<T, U extends NgIterable<T> = NgIterable<T>> implements DoCheck');
});
it('should return binding for structural directive where key maps to a binding', () => {
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride: `<div *ng¦If="anyValue"></div>`,
expectedSpanText: 'ngIf',
});
// Because the input is also part of the selector, the directive is also returned.
expect(definitions!.length).toEqual(2);
const [inputDef, directiveDef] = definitions;
expect(inputDef.textSpan).toEqual('ngIf');
expect(inputDef.contextSpan).toEqual('set ngIf(condition: T);');
expect(directiveDef.textSpan).toEqual('NgIf');
expect(directiveDef.contextSpan).toContain('export declare class NgIf');
});
// Either attribute of a multi-attribute selector should find the directive.
it('should work for directives with compound selectors', () => {
let defs = getDefinitionsAndAssertBoundSpan({
templateOverride: `<button com¦pound custom-button></button>`,
expectedSpanText: 'compound',
});
expect(defs.length).toEqual(1);
expect(defs[0].contextSpan).toContain('export class CompoundCustomButtonDirective');
defs = getDefinitionsAndAssertBoundSpan({
templateOverride: `<button compound cu¦stom-button></button>`,
expectedSpanText: 'custom-button',
});
expect(defs.length).toEqual(1);
expect(defs[0].contextSpan).toContain('export class CompoundCustomButtonDirective');
});
});
describe('bindings', () => {
describe('inputs', () => {
// [tcName] binding resolves to the renamed @Input class member.
it('should work for input providers', () => {
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride: `<test-comp [tcN¦ame]="name"></test-comp>`,
expectedSpanText: 'tcName',
});
expect(definitions!.length).toEqual(1);
const [def] = definitions;
expect(def.textSpan).toEqual('name');
expect(def.contextSpan).toEqual(`@Input('tcName') name = 'test';`);
});
// Same resolution for a static (non-bracketed) attribute form.
it('should work for text inputs', () => {
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride: `<test-comp tcN¦ame="name"></test-comp>`,
expectedSpanText: 'tcName',
});
expect(definitions!.length).toEqual(1);
const [def] = definitions;
expect(def.textSpan).toEqual('name');
expect(def.contextSpan).toEqual(`@Input('tcName') name = 'test';`);
});
// Microsyntax key 'trackBy' maps to the ngForTrackBy setter/getter pair.
it('should work for structural directive inputs ngForTrackBy', () => {
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride: `<div *ngFor="let item of heroes; tr¦ackBy: test;"></div>`,
expectedSpanText: 'trackBy',
});
expect(definitions!.length).toEqual(2);
const [setterDef, getterDef] = definitions;
expect(setterDef.fileName).toContain('ng_for_of.d.ts');
expect(setterDef.textSpan).toEqual('ngForTrackBy');
expect(setterDef.contextSpan).toEqual('set ngForTrackBy(fn: TrackByFunction<T>);');
expect(getterDef.textSpan).toEqual('ngForTrackBy');
expect(getterDef.contextSpan).toEqual('get ngForTrackBy(): TrackByFunction<T>;');
});
it('should work for structural directive inputs ngForOf', () => {
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride: `<div *ngFor="let item o¦f heroes"></div>`,
expectedSpanText: 'of',
});
// Because the input is also part of the selector ([ngFor][ngForOf]), the directive is also
// returned.
expect(definitions!.length).toEqual(2);
const [inputDef, directiveDef] = definitions;
expect(inputDef.textSpan).toEqual('ngForOf');
expect(inputDef.contextSpan)
.toEqual('set ngForOf(ngForOf: U & NgIterable<T> | undefined | null);');
expect(directiveDef.textSpan).toEqual('NgForOf');
expect(directiveDef.contextSpan).toContain('export declare class NgForOf');
});
// Banana-in-a-box resolves to both the @Input and its @Output companion.
it('should work for two-way binding providers', () => {
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride: `<test-comp string-model [(mo¦del)]="title"></test-comp>`,
expectedSpanText: 'model',
});
expect(definitions!.length).toEqual(2);
const [inputDef, outputDef] = definitions;
expect(inputDef.textSpan).toEqual('model');
expect(inputDef.contextSpan).toEqual(`@Input() model: string = 'model';`);
expect(outputDef.textSpan).toEqual('modelChange');
expect(outputDef.contextSpan)
.toEqual(`@Output() modelChange: EventEmitter<string> = new EventEmitter();`);
});
});
describe('outputs', () => {
// (test) resolves to the renamed @Output class member.
it('should work for event providers', () => {
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride: `<test-comp (te¦st)="myClick($event)"></test-comp>`,
expectedSpanText: 'test',
});
expect(definitions!.length).toEqual(1);
const [def] = definitions;
expect(def.textSpan).toEqual('testEvent');
expect(def.contextSpan).toEqual('@Output(\'test\') testEvent = new EventEmitter();');
});
// $event of an EventEmitter output has no definition target.
it('should return nothing for $event from EventEmitter', () => {
const {position} = service.overwriteInlineTemplate(
APP_COMPONENT, `<div string-model (modelChange)="myClick($e¦vent)"></div>`);
const definitionAndBoundSpan = ngLS.getDefinitionAndBoundSpan(APP_COMPONENT, position);
expect(definitionAndBoundSpan).toBeUndefined();
});
it('should return the directive when the event is part of the selector', () => {
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride: `<div (eventSelect¦or)="title = ''"></div>`,
expectedSpanText: `eventSelector`,
});
expect(definitions!.length).toEqual(2);
const [inputDef, directiveDef] = definitions;
expect(inputDef.textSpan).toEqual('eventSelector');
expect(inputDef.contextSpan).toEqual('@Output() eventSelector = new EventEmitter<void>();');
expect(directiveDef.textSpan).toEqual('EventSelectorDirective');
expect(directiveDef.contextSpan).toContain('export class EventSelectorDirective');
});
// Native DOM events resolve into lib.dom.d.ts.
it('should work for $event from native element', () => {
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride: `<div (cl¦ick)="myClick($event)"></div>`,
expectedSpanText: 'click',
});
expect(definitions!.length).toEqual(1);
expect(definitions[0].textSpan).toEqual('addEventListener');
expect(definitions[0].contextSpan)
.toContain('addEventListener<K extends keyof HTMLElementEventMap>');
expect(definitions[0].fileName).toContain('lib.dom.d.ts');
});
});
});
describe('references', () => {
it('should work for element reference declarations', () => {
const {position} =
service.overwriteInlineTemplate(APP_COMPONENT, `<div #cha¦rt></div>{{chart}}`);
const definitionAndBoundSpan = ngLS.getDefinitionAndBoundSpan(APP_COMPONENT, position);
// We're already at the definition, so nothing is returned
expect(definitionAndBoundSpan).toBeUndefined();
});
// Using a template reference ('{{chart}}') jumps back to its '#chart' declaration.
it('should work for element reference uses', () => {
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride: `<div #chart></div>{{char¦t}}`,
expectedSpanText: 'chart',
});
expect(definitions!.length).toEqual(1);
const [varDef] = definitions;
expect(varDef.textSpan).toEqual('chart');
});
});
describe('variables', () => {
// A template loop variable resolves to both its 'let hero' declaration and
// the context field it is bound to.
it('should work for array members', () => {
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride: `<div *ngFor="let hero of heroes">{{her¦o}}</div>`,
expectedSpanText: 'hero',
});
expect(definitions!.length).toEqual(2);
const [templateDeclarationDef, contextDef] = definitions;
expect(templateDeclarationDef.textSpan).toEqual('hero');
// `$implicit` is from the `NgForOfContext`:
// https://github.com/angular/angular/blob/89c5255b8ca59eed27ede9e1fad69857ab0c6f4f/packages/common/src/directives/ng_for_of.ts#L15
expect(contextDef.textSpan).toEqual('$implicit');
expect(contextDef.contextSpan).toContain('$implicit: T;');
});
});
describe('pipes', () => {
// A pipe name resolves to the pipe's transform() method.
it('should work for pipes', () => {
const templateOverride = `<p>The hero's birthday is {{birthday | da¦te: "MM/dd/yy"}}</p>`;
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride,
expectedSpanText: 'date',
});
expect(definitions!.length).toEqual(1);
const [def] = definitions;
expect(def.textSpan).toEqual('transform');
expect(def.contextSpan).toContain('transform(value: Date | string | number, ');
});
});
describe('expressions', () => {
// Component members referenced in a text interpolation resolve to the class field.
it('should find members in a text interpolation', () => {
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride: `<div>{{ tit¦le }}</div>`,
expectedSpanText: 'title',
});
expect(definitions!.length).toEqual(1);
const [def] = definitions;
expect(def.textSpan).toEqual('title');
expect(def.contextSpan).toEqual(`title = 'Tour of Heroes';`);
});
// Property access on a member resolves into the accessed type (string.length).
it('should work for accessed property reads', () => {
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride: `<div>{{title.len¦gth}}</div>`,
expectedSpanText: 'length',
});
expect(definitions!.length).toEqual(1);
const [def] = definitions;
expect(def.textSpan).toEqual('length');
expect(def.contextSpan).toEqual('readonly length: number;');
});
// Members used inside attribute interpolations, input bindings and event
// handlers all resolve to the same component field declaration.
it('should find members in an attribute interpolation', () => {
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride: `<div string-model model="{{tit¦le}}"></div>`,
expectedSpanText: 'title',
});
expect(definitions!.length).toEqual(1);
const [def] = definitions;
expect(def.textSpan).toEqual('title');
expect(def.contextSpan).toEqual(`title = 'Tour of Heroes';`);
});
it('should find members of input binding', () => {
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride: `<test-comp [tcName]="ti¦tle"></test-comp>`,
expectedSpanText: 'title',
});
expect(definitions!.length).toEqual(1);
const [def] = definitions;
expect(def.textSpan).toEqual('title');
expect(def.contextSpan).toEqual(`title = 'Tour of Heroes';`);
});
it('should find members of event binding', () => {
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride: `<test-comp (test)="ti¦tle=$event"></test-comp>`,
expectedSpanText: 'title',
});
expect(definitions!.length).toEqual(1);
const [def] = definitions;
expect(def.textSpan).toEqual('title');
expect(def.contextSpan).toEqual(`title = 'Tour of Heroes';`);
});
it('should work for method calls', () => {
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride: `<div (click)="setT¦itle('title')"></div>`,
expectedSpanText: 'setTitle',
});
expect(definitions!.length).toEqual(1);
const [def] = definitions;
expect(def.textSpan).toEqual('setTitle');
expect(def.contextSpan).toContain('setTitle(newTitle: string)');
});
it('should work for accessed properties in writes', () => {
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride: `<div (click)="hero.i¦d = 2"></div>`,
expectedSpanText: 'id',
});
expect(definitions!.length).toEqual(1);
const [def] = definitions;
expect(def.textSpan).toEqual('id');
expect(def.contextSpan).toEqual('id: number;');
});
it('should work for method call arguments', () => {
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride: `<div (click)="setTitle(hero.nam¦e)"></div>`,
expectedSpanText: 'name',
});
expect(definitions!.length).toEqual(1);
const [def] = definitions;
expect(def.textSpan).toEqual('name');
expect(def.contextSpan).toEqual('name: string;');
});
it('should find members of two-way binding', () => {
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride: `<input [(ngModel)]="ti¦tle" />`,
expectedSpanText: 'title',
});
expect(definitions!.length).toEqual(1);
const [def] = definitions;
expect(def.textSpan).toEqual('title');
expect(def.contextSpan).toEqual(`title = 'Tour of Heroes';`);
});
it('should find members in a structural directive', () => {
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride: `<div *ngIf="anyV¦alue"></div>`,
expectedSpanText: 'anyValue',
});
expect(definitions!.length).toEqual(1);
const [def] = definitions;
expect(def.textSpan).toEqual('anyValue');
expect(def.contextSpan).toEqual('anyValue: any;');
});
it('should work for variables in structural directives', () => {
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride: `<div *ngFor="let item of heroes as her¦oes2; trackBy: test;"></div>`,
expectedSpanText: 'heroes2',
});
expect(definitions!.length).toEqual(1);
const [def] = definitions;
expect(def.textSpan).toEqual('ngForOf');
expect(def.contextSpan).toEqual('ngForOf: U;');
});
it('should work for uses of members in structural directives', () => {
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride: `<div *ngFor="let item of heroes as heroes2">{{her¦oes2}}</div>`,
expectedSpanText: 'heroes2',
});
expect(definitions!.length).toEqual(2);
const [def, contextDef] = definitions;
expect(def.textSpan).toEqual('heroes2');
expect(def.contextSpan).toEqual('of heroes as heroes2');
expect(contextDef.textSpan).toEqual('ngForOf');
expect(contextDef.contextSpan).toEqual('ngForOf: U;');
});
it('should work for members in structural directives', () => {
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride: `<div *ngFor="let item of her¦oes; trackBy: test;"></div>`,
expectedSpanText: 'heroes',
});
expect(definitions!.length).toEqual(1);
const [def] = definitions;
expect(def.textSpan).toEqual('heroes');
expect(def.contextSpan).toEqual('heroes: Hero[] = [this.hero];');
});
it('should return nothing for the $any() cast function', () => {
const {position} =
service.overwriteInlineTemplate(APP_COMPONENT, `<div>{{$an¦y(title)}}</div>`);
const definitionAndBoundSpan = ngLS.getDefinitionAndBoundSpan(APP_COMPONENT, position);
expect(definitionAndBoundSpan).toBeUndefined();
});
it('should work for object literals with shorthand declarations in an action', () => {
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride: `<div (click)="setHero({na¦me, id: 1})"></div>`,
expectedSpanText: 'name',
});
expect(definitions!.length).toEqual(1);
const [def] = definitions;
expect(def.textSpan).toEqual('name');
expect(def.fileName).toContain('/app/app.component.ts');
expect(def.contextSpan).toContain(`name = 'Frodo';`);
});
it('should work for object literals with shorthand declarations in a data binding', () => {
const definitions = getDefinitionsAndAssertBoundSpan({
templateOverride: `{{ {na¦me} }}`,
expectedSpanText: 'name',
});
expect(definitions!.length).toEqual(1);
const [def] = definitions;
expect(def.textSpan).toEqual('name');
expect(def.fileName).toContain('/app/app.component.ts');
expect(def.contextSpan).toContain(`name = 'Frodo';`);
});
});
// Definition-lookup specs for external resource URLs (templateUrl /
// styleUrls) in the @Component decorator. The expected definition is the
// start of the referenced file ({start: 0, length: 0}).
describe('external resources', () => {
it('should be able to find a template from a url', () => {
const {position, text} = service.overwrite(APP_COMPONENT, `
import {Component} from '@angular/core';
@Component({
templateUrl: './tes¦t.ng',
})
export class AppComponent {}`);
const result = ngLS.getDefinitionAndBoundSpan(APP_COMPONENT, position);
expect(result).toBeDefined();
const {textSpan, definitions} = result!;
// The bound span covers the whole URL literal, not just the cursor word.
expect(text.substring(textSpan.start, textSpan.start + textSpan.length)).toEqual('./test.ng');
expect(definitions).toBeDefined();
expect(definitions!.length).toBe(1);
const [def] = definitions!;
expect(def.fileName).toContain('/app/test.ng');
expect(def.textSpan).toEqual({start: 0, length: 0});
});
it('should be able to find a stylesheet from a url', () => {
const {position, text} = service.overwrite(APP_COMPONENT, `
import {Component} from '@angular/core';
@Component({
template: 'empty',
styleUrls: ['./te¦st.css']
})
export class AppComponent {}`);
const result = ngLS.getDefinitionAndBoundSpan(APP_COMPONENT, position);
expect(result).toBeDefined();
const {textSpan, definitions} = result!;
expect(text.substring(textSpan.start, textSpan.start + textSpan.length))
.toEqual('./test.css');
expect(definitions).toBeDefined();
expect(definitions!.length).toBe(1);
const [def] = definitions!;
expect(def.fileName).toContain('/app/test.css');
expect(def.textSpan).toEqual({start: 0, length: 0});
});
// NOTE(review): disabled (xit) — resource lookup with malformed component
// metadata is not yet supported; re-enable once the underlying feature lands.
xit('should be able to find a resource url with malformed component meta', () => {
const {position, text} = service.overwrite(APP_COMPONENT, `
import {Component} from '@angular/core';
@Component({
invalidProperty: '',
styleUrls: ['./te¦st.css']
})
export class AppComponent {}`);
const result = ngLS.getDefinitionAndBoundSpan(APP_COMPONENT, position);
expect(result).toBeDefined();
const {textSpan, definitions} = result!;
expect(text.substring(textSpan.start, textSpan.start + textSpan.length))
.toEqual('./test.css');
expect(definitions).toBeDefined();
expect(definitions![0].fileName).toContain('/app/test.css');
});
});
// Overrides the inline template, queries the language service for the
// definition at the '¦' cursor, asserts the bound span text matches
// `expectedSpanText`, and returns the humanized definition records.
function getDefinitionsAndAssertBoundSpan(
{templateOverride, expectedSpanText}: {templateOverride: string, expectedSpanText: string}):
Array<{textSpan: string, contextSpan: string | undefined, fileName: string}> {
  const overwriteResult = service.overwriteInlineTemplate(APP_COMPONENT, templateOverride);
  const result = ngLS.getDefinitionAndBoundSpan(APP_COMPONENT, overwriteResult.position);
  expect(result).toBeTruthy();
  const boundSpan = result!.textSpan;
  const spanText =
      overwriteResult.text.substring(boundSpan.start, boundSpan.start + boundSpan.length);
  expect(spanText).toEqual(expectedSpanText);
  expect(result!.definitions).toBeTruthy();
  return result!.definitions!.map(d => humanizeDefinitionInfo(d, service));
}
});
import {assert} from "chai";
import {SkillResponse} from "../src/core/SkillResponse";
import {VirtualAlexa} from "../src/core/VirtualAlexa";
// End-to-end specs that build a VirtualAlexa from on-disk interaction-model
// files (sample utterances, intent schema, SMAPI / V2 models) and exercise
// simple utterances against the bundled test handler.
describe("VirtualAlexa Tests Using Files", function() {
it("Parses the files and does a simple utterance", async () => {
const virtualAlexa = VirtualAlexa.Builder()
.handler("test/resources/index.handler")
.sampleUtterancesFile("./test/resources/SampleUtterances.txt")
.intentSchemaFile("./test/resources/IntentSchema.json")
.create();
// Capture the outgoing request so its locale can be asserted below.
let requestToCheck: any;
assert(virtualAlexa.filter((request) => {
requestToCheck = request;
}));
const response = await virtualAlexa.utter("play now") as any;
assert.isDefined(response);
assert.isTrue(response.success);
// en-US is the default locale when none is configured.
assert.equal(virtualAlexa.context().locale(), "en-US");
assert.equal(requestToCheck.request.locale, "en-US");
});
it("Parses lambda file with parent directory path", async () => {
const virtualAlexa = VirtualAlexa.Builder()
.handler("test/../test/resources/index.handler")
.sampleUtterancesFile("./test/resources/SampleUtterances.txt")
.intentSchemaFile("./test/resources/IntentSchema.json")
.create();
let requestToCheck: any;
assert(virtualAlexa.filter((request) => {
requestToCheck = request;
}));
const response = await virtualAlexa.utter("play now") as any;
assert.isDefined(response);
assert.isTrue(response.success);
assert.equal(virtualAlexa.context().locale(), "en-US");
assert.equal(requestToCheck.request.locale, "en-US");
});
it("Parses the files and does a simple utterance in german", async () => {
const virtualAlexa = VirtualAlexa.Builder()
.handler("test/resources/index.js")
.sampleUtterancesFile("./test/resources/SampleUtterances.txt")
.intentSchemaFile("./test/resources/IntentSchema.json")
.locale("de-DE")
.create();
let requestToCheck: any;
assert(virtualAlexa.filter((request) => {
requestToCheck = request;
}));
const response = await virtualAlexa.utter("play now") as any;
assert.isDefined(response);
assert.isTrue(response.success);
// An explicitly configured locale must flow through to the request.
assert.equal(virtualAlexa.context().locale(), "de-DE");
assert.equal(requestToCheck.request.locale, "de-DE");
});
it("Parses the SMAPI format interaction model and does a simple utterance", async () => {
const virtualAlexa = VirtualAlexa.Builder()
.handler("test/resources/index.handler")
.interactionModelFile("./test/resources/InteractionModelSMAPI.json")
.create();
await virtualAlexa.filter((request) => {
assert.equal(request.request.intent.name, "TellMeMoreIntent");
}).utter("contact info");
});
it("Parses the Interaction Model format V2 and does a simple utterance", async () => {
const virtualAlexa = VirtualAlexa.Builder()
.handler("test/resources/index.handler")
.interactionModelFile("./test/resources/LanguageModel.json")
.create();
await virtualAlexa.filter((request) => {
assert.equal(request.request.intent.name, "TellMeMoreIntent");
}).utter("contact info");
});
// NOTE: the next two tests chdir into the fixture directory so the builder
// can discover models/<locale>.json by convention; they chdir back at the
// end. A failure between the two chdir calls would leak the working directory.
it("Parses the Interaction Model from a locale and does a simple utterance", async () => {
process.chdir("test/resources");
const virtualAlexa = VirtualAlexa.Builder()
.handler("index.handler")
.locale("de-DE")
.create();
const response = await virtualAlexa.utter("contact info") as any;
assert.equal(response.intent, "TellMeMoreIntent");
process.chdir("../..");
});
it("Parses the Interaction Model from the default locale and does a simple utterance", async () => {
process.chdir("test/resources");
const virtualAlexa = VirtualAlexa.Builder()
.handler("index.handler")
.create();
const response = await virtualAlexa.utter("contact info") as any;
assert.equal(response.intent, "TellMeMoreIntent");
process.chdir("../..");
});
it("Throws error when locale file is not present", async () => {
try {
// create() should throw because no model file exists in the cwd.
const virtualAlexa = VirtualAlexa.Builder()
.handler("index.handler")
.create();
assert(false, "This should not be reached");
} catch (e) {
assert.isDefined(e);
}
});
it("Has a bad filename", () => {
try {
VirtualAlexa.Builder()
.handler("test/resources/index.handler")
.sampleUtterancesFile("./test/resources/SampleUtterancesWrong.txt")
.intentSchemaFile("./test/resources/IntentSchema.json")
.create();
assert(false, "This should not be reached");
} catch (e) {
assert.isDefined(e);
}
});
});
describe("VirtualAlexa Tests Using URL", function() {
this.timeout(5000);
it("Calls a remote mock service via HTTPS", async () => {
const virtualAlexa = VirtualAlexa.Builder()
.intentSchemaFile("./test/resources/IntentSchema.json")
.sampleUtterancesFile("./test/resources/SampleUtterances.txt")
.skillURL("https://httpbin.org/post")
.create();
const response = await virtualAlexa.utter("play now") as any;
assert.isDefined(response.data);
assert.equal(response.url, "https://httpbin.org/post");
});
it("Calls a remote mock service via HTTP", async () => {
const virtualAlexa = VirtualAlexa.Builder()
.intentSchemaFile("./test/resources/IntentSchema.json")
.sampleUtterancesFile("./test/resources/SampleUtterances.txt")
.skillURL("http://httpbin.org/post")
.create();
const response = await virtualAlexa.utter("play now") as any;
assert.isDefined(response.data);
assert.equal(response.url, "http://httpbin.org/post");
});
});
// Specs driven by an in-memory unified interaction model (intents + slot
// types passed as a JS object). The bulk of these verify slot matching and
// entity resolution: synonym lookup, multiple matches, extended builtin
// types, and the no-resolution case for plain builtin types.
describe("VirtualAlexa Tests Using Unified Interaction Model", function() {
const interactionModel = {
intents: [
{
name: "Play",
samples: ["play", "play next", "play now"],
},
{
name: "SlottedIntent",
samples: ["slot {SlotName}"],
slots: [
{name: "SlotName", type: "SLOT_TYPE"},
],
},
{
name: "SlottedIntentEmptySynonymArray",
samples: ["slotEmptySynonymArray {SlotEmptySynonymArray}"],
slots: [
{name: "SlotEmptySynonymArray", type: "SLOT_EMPTY_SYNONYM_ARRAY_TYPE"},
],
},
{
name: "MultipleSlots",
samples: ["multiple {SlotA} and {SlotB}", "reversed {SlotB} then {SlotA}"],
slots: [
{name: "SlotA", type: "SLOT_TYPE"},
{name: "SlotB", type: "SLOT_TYPE"},
],
},
{
name: "CustomSlot",
samples: ["custom {customSlot}"],
slots: [
{name: "customSlot", type: "COUNTRY_CODE"},
],
},
{
name: "CityIntent",
samples: ["city {citySlot}"],
slots: [
{name: "citySlot", type: "AMAZON.Cities"},
],
},
{
name: "StateIntent",
samples: ["state {stateSlot}"],
slots: [
{name: "stateSlot", type: "AMAZON.States"},
],
},
],
types: [
{
name: "SLOT_EMPTY_SYNONYM_ARRAY_TYPE",
values: [
{
id: "null",
name: {
synonyms: [],
value: "VALUE1",
},
},
],
},
{
// "English Speakers" appears in two values on purpose, to exercise
// multi-match entity resolution below.
name: "COUNTRY_CODE",
values: [
{
id: "US",
name: {
synonyms: ["USA", "America", "US", "English Speakers"],
value: "US",
},
},
{
id: "DE",
name: {
synonyms: ["Germany", "DE"],
value: "DE",
},
},
{
id: "UK",
name: {
synonyms: ["United Kingdom", "England", "English Speakers"],
value: "UK",
},
},
],
},
{
// Extends the builtin AMAZON.Cities type with a custom value.
name: "AMAZON.Cities",
values: [
{
id: "Lima",
name: {
synonyms: ["Lima"],
value: "Lima, Peru",
},
},
],
},
],
};
it("Parses the JSON and does a simple utterance", async () => {
const virtualAlexa = VirtualAlexa.Builder()
.handler("test/resources/index.handler")
.interactionModel(interactionModel)
.create();
const response = await virtualAlexa.utter("play now") as any;
assert.isDefined(response);
assert.isTrue(response.success);
});
it("Parses the file and does a simple utterance", async () => {
const virtualAlexa = VirtualAlexa.Builder()
.handler("test/resources/index.handler")
.interactionModelFile("./test/resources/InteractionModel.json")
.create();
const response = await virtualAlexa.intend("AMAZON.CancelIntent") as any;
assert.isDefined(response);
assert.isTrue(response.success);
});
it("Utters builtin intent with custom phrase", async () => {
const virtualAlexa = VirtualAlexa.Builder()
.handler("test/resources/index.handler")
.interactionModel(interactionModel)
.create();
await virtualAlexa.filter((request) => {
assert.equal(request.request.intent.name, "CustomSlot");
}).utter("custom DE");
});
it("Utters exit", async () => {
const virtualAlexa = VirtualAlexa.Builder()
.handler("test/resources/index.handler")
.interactionModel(interactionModel)
.create();
// "exit" is a reserved phrase that ends the session outright.
await virtualAlexa.filter((request) => {
assert.equal(request.request.type, "SessionEndedRequest");
}).utter("exit");
});
it("Utters slotted phrase with empty synonym array", async () => {
const virtualAlexa = VirtualAlexa.Builder()
.handler("test/resources/index.handler")
.interactionModel(interactionModel)
.create();
await virtualAlexa.filter((request) => {
assert.equal(request.request.intent.name, "SlottedIntentEmptySynonymArray");
assert.equal(request.request.intent.slots.SlotEmptySynonymArray.value, "value1");
}).utter("slotEmptySynonymArray value1");
});
it("Utters slotted phrase with different synonym array", async () => {
const virtualAlexa = VirtualAlexa.Builder()
.handler("test/resources/index.handler")
.interactionModel(interactionModel)
.create();
await virtualAlexa.filter((request) => {
assert.equal(request.request.intent.name, "CustomSlot");
assert.equal(request.request.intent.slots.customSlot.value, "UK");
// Verify entity resolution
const resolution = request.request.intent.slots.customSlot.resolutions.resolutionsPerAuthority[0];
assert.equal(request.request.intent.slots.customSlot.resolutions.resolutionsPerAuthority.length, 1);
assert.equal(resolution.status.code, "ER_SUCCESS_MATCH");
assert.equal(resolution.values.length, 1);
assert.equal(resolution.values[0].value.id, "UK");
assert.equal(resolution.values[0].value.name, "UK");
}).utter("custom UK");
});
it("Utters slotted phrase with synonym value", async () => {
const virtualAlexa = VirtualAlexa.Builder()
.handler("test/resources/index.handler")
.interactionModel(interactionModel)
.create();
await virtualAlexa.filter((request) => {
assert.equal(request.request.intent.name, "CustomSlot");
// The raw slot value keeps the uttered synonym, lower-cased.
assert.equal(request.request.intent.slots.customSlot.value, "england");
// Verify entity resolution
const resolution = request.request.intent.slots.customSlot.resolutions.resolutionsPerAuthority[0];
assert.equal(request.request.intent.slots.customSlot.resolutions.resolutionsPerAuthority.length, 1);
assert.equal(resolution.status.code, "ER_SUCCESS_MATCH");
assert.equal(resolution.values.length, 1);
assert.equal(resolution.values[0].value.id, "UK");
assert.equal(resolution.values[0].value.name, "UK");
}).utter("custom england");
});
it("Utters slotted phrase with multiple synonym matches", async () => {
const virtualAlexa = VirtualAlexa.Builder()
.handler("test/resources/index.handler")
.interactionModel(interactionModel)
.create();
await virtualAlexa.filter((request) => {
assert.equal(request.request.intent.name, "CustomSlot");
assert.equal(request.request.intent.slots.customSlot.value, "English Speakers");
// Verify entity resolution: "English Speakers" is a synonym for both
// the US and UK values, so both should be resolved.
const resolution = request.request.intent.slots.customSlot.resolutions.resolutionsPerAuthority[0];
assert.equal(request.request.intent.slots.customSlot.resolutions.resolutionsPerAuthority.length, 1);
assert.equal(resolution.status.code, "ER_SUCCESS_MATCH");
assert.equal(resolution.values.length, 2);
assert.equal(resolution.values[0].value.id, "US");
assert.equal(resolution.values[0].value.name, "US");
assert.equal(resolution.values[1].value.id, "UK");
assert.equal(resolution.values[1].value.name, "UK");
}).utter("custom English Speakers");
});
it("Utters slotted phrase which matches extended builtin value", async () => {
const virtualAlexa = VirtualAlexa.Builder()
.handler("test/resources/index.handler")
.interactionModel(interactionModel)
.create();
await virtualAlexa.filter((request) => {
assert.equal(request.request.intent.name, "CityIntent");
assert.equal(request.request.intent.slots.citySlot.value, "Lima");
// Verify entity resolution
const resolution = request.request.intent.slots.citySlot.resolutions.resolutionsPerAuthority[0];
assert.equal(request.request.intent.slots.citySlot.resolutions.resolutionsPerAuthority.length, 1);
assert.equal(resolution.status.code, "ER_SUCCESS_MATCH");
assert.equal(resolution.values.length, 1);
assert.equal(resolution.values[0].value.id, "Lima");
assert.equal(resolution.values[0].value.name, "Lima, Peru");
}).utter("city Lima");
});
it("Utters slotted phrase which matches builtin value", async () => {
const virtualAlexa = VirtualAlexa.Builder()
.handler("test/resources/index.handler")
.interactionModel(interactionModel)
.create();
await virtualAlexa.filter((request) => {
assert.equal(request.request.intent.name, "CityIntent");
assert.equal(request.request.intent.slots.citySlot.value, "Chicago");
// Verify entity resolution: Chicago matches the builtin slot type but
// not the custom extension values, so resolution reports NO_MATCH.
const resolution = request.request.intent.slots.citySlot.resolutions.resolutionsPerAuthority[0];
assert.equal(request.request.intent.slots.citySlot.resolutions.resolutionsPerAuthority.length, 1);
assert.equal(resolution.status.code, "ER_SUCCESS_NO_MATCH");
assert.equal(resolution.values.length, 0);
}).utter("city Chicago");
});
it("Utters slotted phrase which matches builtin value, no extensions", async () => {
const virtualAlexa = VirtualAlexa.Builder()
.handler("test/resources/index.handler")
.interactionModel(interactionModel)
.create();
await virtualAlexa.filter((request) => {
assert.equal(request.request.intent.name, "StateIntent");
assert.equal(request.request.intent.slots.stateSlot.value, "Connecticut");
// Verify no entity resolution
assert.isUndefined(request.request.intent.slots.stateSlot.resolutions);
}).utter("state Connecticut");
});
});
describe("VirtualAlexa Tests Using JSON", function() {
// Legacy-format intent schema shared by every spec in this describe block.
const intentSchema = {
intents: [
{
intent: "AFirstIntent",
},
{
intent: "AMAZON.CancelIntent",
},
{
intent: "AMAZON.StopIntent",
},
{
intent: "Play",
},
{
intent: "SlottedIntent",
slots: [
{name: "SlotName", type: "SLOT_TYPE"},
],
},
{
intent: "MultipleSlots",
slots: [
{name: "SlotA", type: "SLOT_TYPE"},
{name: "SlotB", type: "SLOT_TYPE"},
],
},
],
};
// Sample utterances keyed by intent; {Name} placeholders mark slot positions.
// "PLAY case" deliberately mixes case to test case-insensitive matching.
const sampleUtterances = {
"AFirstIntent": ["default"],
"AMAZON.CancelIntent": ["cancel it now"],
"MultipleSlots": ["multiple {SlotA} and {SlotB}", "reversed {SlotB} then {SlotA}"],
"Play": ["play", "play next", "play now", "PLAY case"],
"SlottedIntent": ["slot {SlotName}"],
};
// Specs for VirtualAlexa#utter: phrase-to-intent matching, slot extraction,
// session persistence, and error handling for unmatchable utterances.
describe("#utter", () => {
    let virtualAlexa: VirtualAlexa;
    beforeEach(() => {
        virtualAlexa = VirtualAlexa.Builder()
            .handler("test/resources/index.handler")
            .sampleUtterances(sampleUtterances)
            .intentSchema(intentSchema)
            .create();
    });

    afterEach(async () => {
        await virtualAlexa.resetFilter().endSession();
    });

    it("Utters simple phrase", async () => {
        const response = await virtualAlexa.filter((request) => {
            // With no device configured, device id, endpoint and permissions
            // are all absent from the request context.
            assert.isUndefined(request.context.System.device.deviceId);
            assert.isUndefined(request.context.System.apiEndpoint);
            assert.isDefined(request.context.System.device.supportedInterfaces.AudioPlayer);
            assert.isDefined(request.context.System.user.userId);
            assert.isUndefined(request.context.System.user.permissions);
            assert.equal(request.request.intent.name, "Play");
        }).utter("play now") as SkillResponse;

        // Test the response object accessors against the fixture handler output.
        assert.equal(response.prompt(), "SSML");
        assert.equal(response.reprompt(), "TEXT");
        assert.equal(response.card().content, "content");
        assert.equal(response.cardImage().smallImageUrl, "smallImageUrl");
        assert.equal(response.cardContent(), "content");
        assert.equal(response.cardTitle(), "title");
        assert.equal(response.cardLargeImage(), "largeImageUrl");
        assert.equal(response.cardSmallImage(), "smallImageUrl");
        assert.equal(response.attr("counter"), "0");
        assert.equal(response.attrs("counter", "key1").counter, "0");
        assert.isUndefined(response.attrs("counter", "key1").key1);
    });

    it("Utters simple phrase with different case", async () => {
        await virtualAlexa.filter((request) => {
            assert.equal(request.request.intent.name, "Play");
        }).utter("play NOW");
    });

    it("Utters simple phrase with different case where sample is upper case", async () => {
        await virtualAlexa.filter((request) => {
            assert.equal(request.request.intent.name, "Play");
        }).utter("play case");
    });

    it("Utters slotted phrase", async () => {
        await virtualAlexa.filter((request) => {
            assert.equal(request.request.intent.slots.SlotName.value, "my slot");
        }).utter("Slot my slot");
    });

    it("Utters slotted phrase with no space", async () => {
        let exceptionCaught = false;
        // Make sure the matcher requires a space between the sample phrase
        // and the slot value.
        try {
            await virtualAlexa.utter("Slotmy slot");
        } catch (e) {
            exceptionCaught = true;
            assert.equal(e.message, "Unable to match utterance: Slotmy slot to an intent. " +
                "Try a different utterance, or explicitly set the intent");
        }
        // Guard against the utterance silently matching.
        assert.equal(exceptionCaught, true);
    });

    it("Utters slotted phrase with no space, promise catch", (done) => {
        // Verifies the error is raised synchronously, before a promise settles.
        try {
            virtualAlexa.utter("Slotmy slot");
        } catch (e) {
            assert.equal(e.message, "Unable to match utterance: Slotmy slot to an intent. " +
                "Try a different utterance, or explicitly set the intent");
            done();
        }
    });

    it("Utters builtin intent", async () => {
        await virtualAlexa.filter((request) => {
            assert.equal(request.request.intent.name, "AMAZON.CancelIntent");
        }).utter("cancel");
    });

    it("Utters builtin intent with custom phrase", async () => {
        await virtualAlexa.filter((request) => {
            assert.equal(request.request.intent.name, "AMAZON.CancelIntent");
        }).utter("cancel it now");
    });

    it("Utters builtin intent not in schema", async () => {
        let exceptionCaught = false;
        try {
            await virtualAlexa.utter("page up");
        } catch (e) {
            exceptionCaught = true;
            assert.equal(e.message, "Unable to match utterance: page up to an intent. " +
                "Try a different utterance, or explicitly set the intent");
        }
        assert.equal(exceptionCaught, true);
    });

    it("Utters phrases and maintains session", async () => {
        // Calls our dummy skill twice
        // Inside the skill, it increments a counter by 1 each time
        let response = await virtualAlexa.utter("play now") as SkillResponse;
        assert.equal(response.sessionAttributes.counter, 0);
        response = await virtualAlexa.utter("play now") as SkillResponse;
        assert.equal(response.sessionAttributes.counter, 1);
    });

    it("Utters phrases with launch words", async () => {
        // Use the beforeEach instance (the original created a shadowing copy
        // whose session was never ended by afterEach).
        await virtualAlexa.filter((request) => {
            assert.equal(request.request.type, "IntentRequest");
            assert.equal(request.request.intent.name, "Play");
        }).utter("tell skill to play next");
    });
});
describe("#utterWithDeviceInfo", () => {
it("Utters simple phrase with device info", async () => {
const virtualAlexa = VirtualAlexa.Builder()
.handler("test/resources/index.handler")
.sampleUtterances(sampleUtterances)
.intentSchema(intentSchema)
.create();
virtualAlexa.context().device().setID("testID");
virtualAlexa.context().device().audioPlayerSupported(true);
virtualAlexa.context().device().displaySupported(true);
virtualAlexa.context().device().videoAppSupported(true);
await virtualAlexa.filter((request) => {
assert.isDefined(request.context.System.device.deviceId);
assert.equal(request.context.System.apiEndpoint, "https://api.amazonalexa.com");
assert.isDefined(request.context.System.device.supportedInterfaces.AudioPlayer);
assert.isDefined(request.context.System.device.supportedInterfaces.Display);
assert.isDefined(request.context.System.device.supportedInterfaces.VideoApp);
assert.isDefined(request.context.System.user.userId);
assert.isDefined(request.context.System.user.permissions);
assert.isDefined(request.context.System.user.permissions.consentToken);
assert.equal(request.request.intent.name, "Play");
}).utter("play now");
});
it("Removes audio player capability", async () => {
const virtualAlexa = VirtualAlexa.Builder()
.handler("test/resources/index.handler")
.sampleUtterances(sampleUtterances)
.intentSchema(intentSchema)
.create();
virtualAlexa.context().device().setID("testID");
virtualAlexa.context().device().audioPlayerSupported(false);
await virtualAlexa.filter((request) => {
assert.isUndefined(request.context.System.device.supportedInterfaces.AudioPlayer);
}).utter("play now");
});
});
// Specs for VirtualAlexa#intend: direct intent invocation with and without
// slot values, plus error handling for invalid slot usage.
describe("#intend", () => {
    const virtualAlexa = VirtualAlexa.Builder()
        .handler("test/resources/index.handler")
        .sampleUtterances(sampleUtterances)
        .intentSchema(intentSchema)
        .create();

    afterEach(async () => {
        await virtualAlexa.endSession();
    });

    it("Intends simply", async () => {
        const response = await virtualAlexa.intend("Play") as any;
        assert.isDefined(response);
        assert.isTrue(response.success);
    });

    it("Intends with filter", async () => {
        const reply = await virtualAlexa.filter((request) => {
            request.session.sessionId = "Filtered";
        }).intend("Play") as SkillResponse;
        virtualAlexa.resetFilter();
        assert.equal(reply.sessionAttributes.sessionId, "Filtered");
    });

    it("Intends with slot", async () => {
        const response = await virtualAlexa.intend("SlottedIntent", { SlotName: "Value" }) as any;
        assert.isDefined(response);
        assert.isTrue(response.success);
        assert.equal(response.slot.name, "SlotName");
        assert.equal(response.slot.value, "Value");
    });

    it("Intends with slot value but no slots on intent", async () => {
        let exceptionCaught = false;
        try {
            await virtualAlexa.intend("Play", {SlotName: "Value"});
        } catch (e) {
            exceptionCaught = true;
            assert.equal(e.message, "Trying to add slot to intent that does not have any slots defined");
        }
        // Guard: the original test passed silently if no exception was thrown.
        assert.isTrue(exceptionCaught);
    });

    it("Intends with slot value but no slots on intent, promise catch", (done) => {
        // Verifies the error is raised synchronously, before a promise settles.
        try {
            virtualAlexa.intend("Play", {SlotName: "Value"});
        } catch (e) {
            assert.equal(e.message, "Trying to add slot to intent that does not have any slots defined");
            done();
        }
    });

    it("Intends with slot value but slot does not exist", async () => {
        let exceptionCaught = false;
        try {
            await virtualAlexa.intend("SlottedIntent", {SlotWrongName: "Value"});
        } catch (error) {
            exceptionCaught = true;
            assert.equal(error.message, "Trying to add undefined slot to intent: SlotWrongName");
        }
        assert.isTrue(exceptionCaught);
    });
});
// Specs for session teardown: explicit endSession() and a "stop" utterance.
describe("#endSession", () => {
    it("Starts and Ends Session", async () => {
        const virtualAlexa = VirtualAlexa.Builder()
            .handler("test/resources/index.handler")
            .sampleUtterances(sampleUtterances)
            .intentSchema(intentSchema)
            .create();
        // Await both calls so a rejection fails the test; the original called
        // done() before endSession() had settled, letting failures escape as
        // unhandled rejections.
        await virtualAlexa.launch();
        await virtualAlexa.endSession();
    });

    it("Starts and Is Asked To Stop", async () => {
        const virtualAlexa = VirtualAlexa.Builder()
            .handler("test/resources/index.handler")
            .sampleUtterances(sampleUtterances)
            .intentSchema(intentSchema)
            .create();
        await virtualAlexa.launch();
        await virtualAlexa.utter("stop");
        // A stop intent should tear down the session entirely.
        assert.isUndefined(virtualAlexa.context().session());
    });
});
describe("#launch", () => {
it("Launches with filter", async () => {
const virtualAlexa = VirtualAlexa.Builder()
.handler("test/resources/index.handler")
.sampleUtterances(sampleUtterances)
.intentSchema(intentSchema)
.create();
const reply = await virtualAlexa.filter((request) => {
request.session.sessionId = "Filtered";
}).launch();
assert.equal(reply.sessionAttributes.sessionId, "Filtered");
});
it("Launches with list of special utters ", async () => {
const virtualAlexa = VirtualAlexa.Builder()
.handler("test/resources/index.handler")
.sampleUtterances(sampleUtterances)
.intentSchema(intentSchema)
.create();
await virtualAlexa.filter((request) => {
assert.equal(request.request.type, "LaunchRequest");
}).utter("open skill");
await virtualAlexa.filter((request) => {
assert.equal(request.request.type, "LaunchRequest");
}).utter("ask skill");
await virtualAlexa.filter((request) => {
assert.equal(request.request.type, "LaunchRequest");
}).utter("launch skill");
await virtualAlexa.filter((request) => {
assert.equal(request.request.type, "LaunchRequest");
}).utter("talk to skill");
});
});
});
describe("VirtualAlexa Tests Using Custom Function", function() {
it("Calls the custom function correctly", async () => {
const myFunction = function(event: any, context: any) {
context.done(null, { custom: true });
};
const virtualAlexa = VirtualAlexa.Builder()
.handler(myFunction)
.sampleUtterancesFile("./test/resources/SampleUtterances.txt")
.intentSchemaFile("./test/resources/IntentSchema.json")
.create();
const reply = await virtualAlexa.filter((request) => {
request.session.sessionId = "Filtered";
}).launch() as any;
assert.isTrue(reply.custom);
});
});
describe("VirtualAlexa Tests Using Node8-style lambda", function() {
    it("Handles a promise being returned", async () => {
        // Node 8-style handlers may return a promise instead of calling context.done().
        const handler = (event: any, context: any) => Promise.resolve({ custom: true });
        const virtualAlexa = VirtualAlexa.Builder()
            .handler(handler)
            .sampleUtterancesFile("./test/resources/SampleUtterances.txt")
            .intentSchemaFile("./test/resources/IntentSchema.json")
            .create();
        const reply: any = await virtualAlexa.filter((request) => {
            request.session.sessionId = "Filtered";
        }).launch();
        assert.isTrue(reply.custom);
    });
    it("Handles a promise being returned with error", async () => {
        // A rejected promise from the handler must surface as a rejection to the caller.
        const handler = (event: any, context: any) => Promise.reject("Error");
        const virtualAlexa = VirtualAlexa.Builder()
            .handler(handler)
            .sampleUtterancesFile("./test/resources/SampleUtterances.txt")
            .intentSchemaFile("./test/resources/IntentSchema.json")
            .create();
        try {
            await virtualAlexa.filter((request) => {
                request.session.sessionId = "Filtered";
            }).launch();
            assert.fail("This should not be reached");
        } catch (e) {
            assert.equal(e, "Error");
        }
    });
});
describe("Echo Show Tests", () => {
    // Builds a skill instance configured as a display-capable (Echo Show style) device.
    const displayCapableAlexa = () => {
        const alexa = VirtualAlexa.Builder()
            .handler("test/resources/index.handler")
            .sampleUtterancesFile("./test/resources/SampleUtterances.txt")
            .intentSchemaFile("./test/resources/IntentSchema.json")
            .create();
        alexa.context().device().setID("testID");
        alexa.context().device().audioPlayerSupported(false);
        alexa.context().device().displaySupported(true);
        return alexa;
    };
    it("Gets echo display stuff from response", async () => {
        const virtualAlexa = displayCapableAlexa();
        const response = await virtualAlexa.utter("play now") as SkillResponse;
        assert.isDefined(response.display());
        assert.equal(response.primaryText(), "PrimaryText");
        assert.equal(response.primaryText("ListToken1"), "ListItem1PrimaryText");
        assert.isUndefined(response.secondaryText("ListToken1"));
        assert.equal(response.secondaryText("ListToken2"), "ListItem2SecondaryText");
        assert.equal(response.tertiaryText("ListToken2"), "ListItem2TertiaryText");
    });
    it("Selects an element", async () => {
        const virtualAlexa = displayCapableAlexa();
        await virtualAlexa.filter((request) => {
            assert.isDefined(request.context.Display);
            assert.equal(request.request.type, "Display.ElementSelected");
            assert.equal(request.request.token, "ListToken1");
        }).selectElement("ListToken1");
    });
});
describe("Request Builder tests", () => {
    it("Sets JSON values", () => {
        const virtualAlexa = VirtualAlexa.Builder()
            .handler("test/resources/index.handler")
            .sampleUtterancesFile("./test/resources/SampleUtterances.txt")
            .intentSchemaFile("./test/resources/IntentSchema.json")
            .create();
        // set() accepts both dotted paths and array-index paths.
        const request = virtualAlexa.request()
            .intent("Play")
            .set("request.path1", "value")
            .set("request.array[0].prop", "value");
        assert.equal(request.json().request.path1, "value");
        assert.equal(request.json().request.array[0].prop, "value");
    });
});
describe("Catalog tests", () => {
    it("Sets JSON values", async () => {
        const virtualAlexa = VirtualAlexa.Builder()
            .handler("test/resources/index.handler")
            .interactionModelFile("test/resources/catalogModel/models/en-US.json")
            .create();
        // Intend with a slot value drawn from the catalog-backed slot type.
        // Fixed: removed stray double semicolon at the end of this statement.
        const response = await virtualAlexa.intend("IngredientIntent", { Ingredient: "cucumber" }) as any;
        assert.isDefined(response);
        assert.isTrue(response.success);
        assert.equal(response.slot.name, "Ingredient");
        assert.equal(response.slot.value, "cucumber");
    });
});
describe("Connection Response tests", () => {
it("Sets JSON values", async () => {
const virtualAlexa = VirtualAlexa.Builder()
.handler("test/resources/index.handler")
.interactionModelFile("test/resources/catalogModel/models/en-US.json")
.create();
const request = virtualAlexa.request().inSkillPurchaseResponse("Buy",
"DECLINED",
"ProductId",
"MyToken")
assert.equal(request.json().request.type, "Connections.Response");
assert.equal(request.json().request.payload.productId, "ProductId");
assert.equal(request.json().request.payload.purchaseResult, "DECLINED");
assert.equal(request.json().request.status.code, 200);
assert.equal(request.json().request.status.message, "OK");
});
}); | the_stack |
import { BrowserPlugin } from '../plugins';
import {
CommonEventProperties,
SelfDescribingJson,
TrackerCore,
CorePluginConfiguration,
} from '@snowplow/tracker-core';
import { SharedState } from '../state';
/** Configuration for Anonymous Tracking */
export type AnonymousTrackingOptions = boolean | { withSessionTracking?: boolean; withServerAnonymisation?: boolean };
/** Available configurations for different storage strategies */
export type StateStorageStrategy = 'cookieAndLocalStorage' | 'cookie' | 'localStorage' | 'none';
/** The supported platform values */
export type Platform = 'web' | 'mob' | 'pc' | 'srv' | 'app' | 'tv' | 'cnsl' | 'iot';
/** The supported Cookie SameSite values */
export type CookieSameSite = 'None' | 'Lax' | 'Strict';
/** The supported methods which events can be sent with */
export type EventMethod = 'post' | 'get' | 'beacon';
/**
* The configuration object for initialising the tracker
* @example
* ```
* newTracker('sp1', 'collector.my-website.com', {
* appId: 'my-app-id',
* platform: 'web',
* plugins: [ PerformanceTimingPlugin(), AdTrackingPlugin() ],
* stateStorageStrategy: 'cookieAndLocalStorage'
* });
* ```
*/
export type TrackerConfiguration = {
/**
* Should event properties be base64 encoded where supported
* @defaultValue true
*/
encodeBase64?: boolean;
/**
* The domain all cookies will be set on
* @defaultValue The current domain
*/
cookieDomain?: string;
/**
* The name of the _sp_.id cookie, will rename the _sp_ section
* @defaultValue _sp_
*/
cookieName?: string;
/**
* The SameSite value for the cookie
* {@link https://snowplowanalytics.com/blog/2020/09/07/pipeline-configuration-for-complete-and-accurate-data/}
* @defaultValue None
*/
cookieSameSite?: CookieSameSite;
/**
* Set the Secure flag on the cookie
* @defaultValue true
*/
cookieSecure?: boolean;
/**
* How long the cookie will be set for
* @defaultValue 63072000 (2 years)
*/
cookieLifetime?: number;
/**
* How long until a session expires
* @defaultValue 1800 (30 minutes)
*/
sessionCookieTimeout?: number;
/** The app id to send with each event */
appId?: string;
/**
* The platform the event is being sent from
* @defaultValue web
*/
platform?: Platform;
/**
* Whether the doNotTrack flag should be respected
* @defaultValue false
*/
respectDoNotTrack?: boolean;
/**
* The preferred technique to use to send events
* @defaultValue post
*/
eventMethod?: EventMethod;
/**
* The post path which events will be sent to
* Ensure your collector is configured to accept events on this post path
* @defaultValue '/com.snowplowanalytics.snowplow/tp2'
*/
postPath?: string;
/**
* Should the Sent Timestamp be attached to events
* @defaultValue true
*/
useStm?: boolean;
/**
* The amount of events that should be buffered before sending
* Recommended to leave as 1 to reduce chance of losing events
* @defaultValue 1
*/
bufferSize?: number;
/**
* Configure the cross domain linker which will add user identifiers to
* links based on the callback
*/
crossDomainLinker?: (elt: HTMLAnchorElement | HTMLAreaElement) => boolean;
/**
* The max size a request can be before the tracker will force send it
* @defaultValue 40000
*/
maxPostBytes?: number;
/**
* Whether the tracker should attempt to figure out what the root
* domain is to store cookies on
*
* This sets cookies to try to determine the root domain, and some cookies may
* fail to save. This is expected behavior.
* @defaultValue false
*/
discoverRootDomain?: boolean;
/**
* The storage strategy which the tracker will use for storing user and session identifiers
* and if local storage is allowed for buffering the events
* @defaultValue cookieAndLocalStorage
*/
stateStorageStrategy?: StateStorageStrategy;
/**
* The maximum amount of events that will be buffered in local storage
*
* This is useful to ensure the Tracker doesn't fill the 5MB or 10MB available to
* each website should the collector be unavailable due to lost connectivity.
* Will drop events once the limit is hit
* @defaultValue 1000
*/
maxLocalStorageQueueSize?: number;
/**
* Whether to reset the Activity Tracking counters on a new page view.
* Disabling this leads to legacy behavior due to a "bug".
* Recommended to leave enabled, particularly on SPAs.
* @defaultValue true
*/
resetActivityTrackingOnPageView?: boolean;
/**
* How long to wait before aborting requests to the collector
* @defaultValue 5000 (milliseconds)
*/
connectionTimeout?: number;
/**
* Configuration for Anonymous Tracking
* @defaultValue false
*/
anonymousTracking?: AnonymousTrackingOptions;
/**
* Use to configure built in contexts
* @defaultValue `{ webPage: true }`
*/
contexts?: { webPage: boolean };
/**
* Inject plugins which will be evaluated for each event
* @defaultValue []
*/
plugins?: Array<BrowserPlugin>;
};
/**
* The data which is passed to the Activity Tracking callback
*/
export type ActivityCallbackData = {
/**
* All context for the activity tracking
* Often generated by the page view events context callback
*/
context: Array<SelfDescribingJson>;
/** The current page view id */
pageViewId: string;
/** The minimum X scroll position for the current page view */
minXOffset: number;
/** The minimum Y scroll position for the current page view */
minYOffset: number;
/** The maximum X scroll position for the current page view */
maxXOffset: number;
/** The maximum Y scroll position for the current page view */
maxYOffset: number;
};
/** The callback for enableActivityTrackingCallback */
export type ActivityCallback = (data: ActivityCallbackData) => void;
/**
* The base configuration for activity tracking
*/
export interface ActivityTrackingConfiguration {
/** The minimum time (in seconds) that must have elapsed before first heartbeat */
minimumVisitLength: number;
/** The interval (in seconds) at which the callback will be fired */
heartbeatDelay: number;
}
/**
* The callback for enableActivityTrackingCallback
*/
export interface ActivityTrackingConfigurationCallback {
/** The callback to fire based on heart beat */
callback: ActivityCallback;
}
/**
* A Page View event
* Used for tracking a page view
*/
export interface PageViewEvent {
/** Override the page title (null/undefined keeps the document title) */
title?: string | null;
/** A callback which will fire on the page view and each subsequent activity tracking event for this page view */
contextCallback?: (() => Array<SelfDescribingJson>) | null;
}
/**
* The configuration that can be changed when disabling anonymous tracking
*/
export interface DisableAnonymousTrackingConfiguration {
/** Available configurations for different storage strategies */
stateStorageStrategy?: StateStorageStrategy;
}
/**
* The configuration that can be changed when enabling anonymous tracking
*/
export interface EnableAnonymousTrackingConfiguration {
/** Configuration for Anonymous Tracking */
options?: AnonymousTrackingOptions;
/** Available configurations for different storage strategies */
stateStorageStrategy?: StateStorageStrategy;
}
/**
* The configuration that can be changed when clearing user data
*/
export interface ClearUserDataConfiguration {
/** Store session information in memory for subsequent events */
preserveSession: boolean;
/** Store user information in memory for subsequent events */
preserveUser: boolean;
}
/**
* The configuration that can be changed when flushing the buffer
*/
export interface FlushBufferConfiguration {
/** The size of the buffer after this flush */
newBufferSize?: number;
}
/**
* The configuration of the plugin to add
*/
export interface BrowserPluginConfiguration extends CorePluginConfiguration {
/** The plugin to add */
plugin: BrowserPlugin;
}
/**
* The Browser Tracker
*/
export interface BrowserTracker {
/** The unique identifier of this tracker */
id: string;
/** The tracker namespace */
namespace: string;
/** The instance of the core library which this tracker has initialised */
core: TrackerCore;
/** The instance of shared state this tracker is using */
sharedState: SharedState;
/**
* Get the domain session index also known as current memorized visit count.
*
* @returns Domain session index
*/
// NOTE(review): this and the getters below are declared `() => void` although
// documented with @returns — confirm the return types against the implementation.
getDomainSessionIndex: () => void;
/**
* Get the current page view ID
*
* @returns Page view ID
*/
getPageViewId: () => void;
/**
* Get the cookie name as cookieNamePrefix + basename + . + domain.
*
* @returns Cookie name
*/
getCookieName: (basename: string) => void;
/**
* Get the current user ID (as set previously with setUserId()).
*
* @returns Business-defined user ID
*/
getUserId: () => void;
/**
* Get visitor ID (from first party cookie)
*
* @returns Visitor ID (or null, if not yet known)
*/
getDomainUserId: () => void;
/**
* Get the visitor information (from first party cookie)
*
* @returns The domain user information array
*/
getDomainUserInfo: () => void;
/**
* Override referrer
*
* @param url - the custom referrer
*/
setReferrerUrl: (url: string) => void;
/**
* Override url
*
* @param url - The custom url
*/
setCustomUrl: (url: string) => void;
/**
* Override document.title
*
* @param title - The document title
*/
setDocumentTitle: (title: string) => void;
/**
* Strip hash tag (or anchor) from URL
*
* @param enableFilter - whether to enable this feature
*/
discardHashTag: (enableFilter: boolean) => void;
/**
* Strip braces from URL
*
* @param enableFilter - whether to enable this feature
*/
discardBrace: (enableFilter: boolean) => void;
/**
* Set first-party cookie path
*
* @param path - The path for cookies
*/
setCookiePath: (path: string) => void;
/**
* Set visitor cookie timeout (in seconds)
*
* @param timeout - The timeout for the user identifier cookie
*/
setVisitorCookieTimeout: (timeout: number) => void;
/**
* Expires current session and starts a new session.
*/
newSession: () => void;
/**
* Enable querystring decoration for links passing a filter
*
* @param crossDomainLinkerCriterion - Function used to determine which links to decorate
*/
crossDomainLinker: (crossDomainLinkerCriterion: (elt: HTMLAnchorElement | HTMLAreaElement) => boolean) => void;
/**
* Enables page activity tracking (sends page
* pings to the Collector regularly).
*
* @param configuration - The activity tracking configuration
*/
enableActivityTracking: (configuration: ActivityTrackingConfiguration) => void;
/**
* Enables page activity tracking (replaces collector ping with callback).
*
* @param configuration - The activity tracking configuration
*/
enableActivityTrackingCallback: (
configuration: ActivityTrackingConfiguration & ActivityTrackingConfigurationCallback
) => void;
/**
* Triggers the activityHandler manually to allow external user defined
* activity. i.e. While watching a video
*/
updatePageActivity: () => void;
/**
* Sets the opt out cookie.
*
* @param name - of the opt out cookie
*/
setOptOutCookie: (name?: string | null) => void;
/**
* Set the business-defined user ID for this user.
*
* @param userId - The business-defined user ID
*/
setUserId: (userId?: string | null) => void;
/**
* Set the business-defined user ID for this user using the location querystring.
*
* @param querystringField - Name of a querystring name-value pair
*/
setUserIdFromLocation: (querystringField: string) => void;
/**
* Set the business-defined user ID for this user using the referrer querystring.
*
* @param querystringField - Name of a querystring name-value pair
*/
setUserIdFromReferrer: (querystringField: string) => void;
/**
* Set the business-defined user ID for this user to the value of a cookie.
*
* @param cookieName - Name of the cookie whose value will be assigned to businessUserId
*/
setUserIdFromCookie: (cookieName: string) => void;
/**
* Specify the Snowplow collector URL. Specify http or https to force it
* or leave it off to match the website protocol.
*
* @param collectorUrl - The collector URL, with or without protocol
*/
setCollectorUrl: (collectorUrl: string) => void;
/**
* Alter buffer size
* Can be useful if you want to stop batching requests to ensure events start
* sending closer to event creation
*
* @param newBufferSize - The new buffer size that will be used for all future tracking
*/
setBufferSize: (newBufferSize: number) => void;
/**
* Send all events in the outQueue
* Only need to use this when sending events with a bufferSize of at least 2
*
* @param configuration - The configuration to use following flushing the buffer
*/
flushBuffer: (configuration?: FlushBufferConfiguration) => void;
/**
* Stop regenerating `pageViewId` (available from `web_page` context)
*/
preservePageViewId: () => void;
/**
* Log visit to this page
*
* @param event - The Page View Event properties
*/
trackPageView: (event?: PageViewEvent & CommonEventProperties) => void;
/**
* Disables anonymous tracking if active (ie. tracker initialized with `anonymousTracking`)
* For stateStorageStrategy override, uses supplied value first,
* falls back to one defined in initial config, otherwise uses cookieAndLocalStorage.
*
* @param configuration - The configuration to use following disabling anonymous tracking
*/
disableAnonymousTracking: (configuration?: DisableAnonymousTrackingConfiguration) => void;
/**
* Enables anonymous tracking (ie. tracker initialized without `anonymousTracking`)
*
* @param configuration - The configuration to use following activating anonymous tracking
*/
enableAnonymousTracking: (configuration?: EnableAnonymousTrackingConfiguration) => void;
/**
* Clears all cookies and local storage containing user and session identifiers
*/
clearUserData: (configuration?: ClearUserDataConfiguration) => void;
/**
* Add a plugin into the plugin collection after Tracker has already been initialised
* @param configuration - The plugin to add
*/
addPlugin: (configuration: BrowserPluginConfiguration) => void;
}
import { app, BrowserWindow, BrowserWindowConstructorOptions } from 'electron';
import { Injector, ConstructorOf } from '@opensumi/di';
import {
createContributionProvider,
ContributionProvider,
URI,
ExtensionCandidate,
IEventBus,
EventBusImpl,
asExtensionCandidate,
} from '@opensumi/ide-core-common';
import { IElectronMainLifeCycleService } from '@opensumi/ide-core-common/lib/electron';
import { argv } from '@opensumi/ide-core-common/lib/node/cli';
import { ElectronMainModule } from '../electron-main-module';
import { ElectronMainApiRegistryImpl, ElectronURLHandlerRegistryImpl } from './api';
import { serviceProviders } from './services';
import { WindowDestroyedEvent, WindowCreatedEvent } from './services/events';
import { ICodeWindowOptions } from './types';
import {
ElectronAppConfig,
ElectronMainApiRegistry,
ElectronMainContribution,
IElectronMainApp,
IElectronMainApiProvider,
IParsedArgs,
ElectronURLHandlerRegistry,
} from './types';
import { CodeWindow } from './window';
/** Options controlling how loadWorkspace targets an existing window. */
export interface IWindowOpenOptions {
/** BrowserWindow id of an existing window to reuse (reloaded in place). */
windowId: number;
// @deprecated - not consulted by loadWorkspace; the window identified by windowId is always reused
replace?: boolean;
}
/**
* Electron main-process application shell.
* Builds the DI injector, registers modules/contributions/APIs in the
* constructor, then opens and tracks CodeWindow instances per workspace.
*/
export class ElectronMainApp {
// All live windows, keyed by their BrowserWindow id.
private codeWindows: Map<number, CodeWindow> = new Map();
private injector: Injector;
private modules: ElectronMainModule[] = [];
// Extension-related CLI flags parsed from process arguments.
private parsedArgs: IParsedArgs = {
extensionDir: argv.extensionDir as string | undefined,
extensionCandidate: argv.extensionCandidate
? Array.isArray(argv.extensionCandidate)
? argv.extensionCandidate
: [argv.extensionCandidate]
: [],
extensionDevelopmentPath: argv.extensionDevelopmentPath as string | undefined,
};
constructor(private config: ElectronAppConfig) {
this.injector = config.injector || new Injector();
// CLI flags take precedence over the values passed in via config.
config.extensionDir = this.parsedArgs.extensionDir ? this.parsedArgs.extensionDir : config.extensionDir || '';
config.extensionCandidate = [
...config.extensionCandidate,
...this.parsedArgs.extensionCandidate.map((e) => asExtensionCandidate(e, false)),
];
if (this.parsedArgs.extensionDevelopmentPath) {
config.extensionCandidate = config.extensionCandidate.concat(
Array.isArray(this.parsedArgs.extensionDevelopmentPath)
? this.parsedArgs.extensionDevelopmentPath.map((e) => asExtensionCandidate(e, true))
: [asExtensionCandidate(this.parsedArgs.extensionDevelopmentPath, true)],
);
}
config.extensionDevelopmentHost = !!this.parsedArgs.extensionDevelopmentPath;
this.injector.addProviders(
{
token: IEventBus,
useClass: EventBusImpl,
},
{
token: ElectronAppConfig,
useValue: config,
},
{
token: IElectronMainApp,
useValue: this,
},
{
token: ElectronURLHandlerRegistry,
useClass: ElectronURLHandlerRegistryImpl,
},
{
token: ElectronMainApiRegistry,
useClass: ElectronMainApiRegistryImpl,
},
...serviceProviders,
);
// Order matters: lifecycle API and contribution providers must exist
// before modules are instantiated and contributions are invoked.
this.injectLifecycleApi();
createContributionProvider(this.injector, ElectronMainContribution);
this.createElectronMainModules(this.config.modules);
this.onBeforeReadyContribution();
this.registerMainApis();
this.registerURLHandlers();
}
/** Waits for Electron's app ready event, then fires onStart contributions. */
async init() {
await app.whenReady().then(() => {
this.onStartContribution();
});
}
/** Lets each contribution register its main-process APIs. */
registerMainApis() {
for (const contribution of this.contributions) {
if (contribution.registerMainApi) {
contribution.registerMainApi(this.injector.get(ElectronMainApiRegistry));
}
}
}
/** Lets each contribution register its URL handlers. */
registerURLHandlers() {
for (const contribution of this.contributions) {
if (contribution.registerURLHandler) {
contribution.registerURLHandler(this.injector.get(ElectronURLHandlerRegistry));
}
}
}
/** Invokes onStart on every contribution (after app ready). */
onStartContribution() {
for (const contribution of this.contributions) {
if (contribution.onStart) {
contribution.onStart();
}
}
}
/** Invokes beforeAppReady on every contribution (during construction). */
onBeforeReadyContribution() {
for (const contribution of this.contributions) {
if (contribution.beforeAppReady) {
contribution.beforeAppReady();
}
}
}
/**
* Opens a workspace in a CodeWindow. If openOptions.windowId refers to an
* existing window, that window is re-targeted and reloaded instead of
* creating a new one.
*/
loadWorkspace(
workspace?: string,
metadata: any = {},
options: BrowserWindowConstructorOptions & ICodeWindowOptions = {},
openOptions?: IWindowOpenOptions,
): CodeWindow {
const formattedWorkspace = this.formatWorkspace(workspace);
if (openOptions && openOptions.windowId) {
const lastWindow = this.getCodeWindowByElectronBrowserWindowId(openOptions.windowId);
if (lastWindow) {
lastWindow.setWorkspace(formattedWorkspace!);
lastWindow.metadata = metadata;
lastWindow.reload();
return lastWindow;
}
}
const window = this.injector.get(CodeWindow, [formattedWorkspace, metadata, options]);
window.start();
if (options.show !== false) {
window.getBrowserWindow().show();
}
const windowId = window.getBrowserWindow().id;
this.codeWindows.set(windowId, window);
// Untrack the window and broadcast its destruction when it is disposed.
window.addDispose({
dispose: () => {
this.injector.get(IEventBus).fire(new WindowDestroyedEvent(window));
this.codeWindows.delete(windowId);
},
});
this.injector.get(IEventBus).fire(new WindowCreatedEvent(window));
return window;
}
get contributions() {
return (
this.injector.get(ElectronMainContribution) as ContributionProvider<ElectronMainContribution>
).getContributions();
}
getCodeWindows() {
return Array.from(this.codeWindows.values());
}
/** Finds the CodeWindow wrapping the BrowserWindow with the given id, if any. */
getCodeWindowByElectronBrowserWindowId(id: number) {
for (const window of this.getCodeWindows()) {
if (window.getBrowserWindow() && window.getBrowserWindow().id === id) {
return window;
}
}
}
/** Finds the CodeWindow currently showing the given workspace (URI or path), if any. */
getCodeWindowByWorkspace(workspace: string) {
const normalizeUri = URI.isUriString(workspace) ? URI.parse(workspace) : URI.file(workspace);
for (const codeWindow of this.getCodeWindows()) {
if (codeWindow.workspace && codeWindow.workspace.toString() === normalizeUri.toString()) {
return codeWindow;
}
}
}
// Instantiates the configured modules and registers their providers and
// contribution providers with the injector.
private createElectronMainModules(Constructors: Array<ConstructorOf<ElectronMainModule>> = []) {
for (const Constructor of Constructors) {
this.modules.push(this.injector.get(Constructor));
}
for (const instance of this.modules) {
if (instance.providers) {
this.injector.addProviders(...instance.providers);
}
if (instance.contributionProvider) {
if (Array.isArray(instance.contributionProvider)) {
for (const contributionProvider of instance.contributionProvider) {
createContributionProvider(this.injector, contributionProvider);
}
} else {
createContributionProvider(this.injector, instance.contributionProvider);
}
}
}
}
private injectLifecycleApi() {
const registry: ElectronMainApiRegistry = this.injector.get(ElectronMainApiRegistry);
registry.registerMainApi(IElectronMainLifeCycleService, new ElectronMainLifeCycleApi(this));
}
/**
* Normalizes non-standard URLs, e.g. on Windows
* "file://C:\\path\\to\\測試.html?background=#hash=title1"
* should be converted to c:\path\to\測試.html
* @param workspace
* @returns string | undefined
*/
private formatWorkspace(workspace?: string): string | undefined {
if (!workspace) {
return undefined;
}
if (URI.isUriString(workspace)) {
// Note: when the path contains unicode characters, obtaining the correct
// path requires URI.parse().codeUri.fsPath or URI.parse().codeUri.toString(true)
return new URL(workspace).toString();
} else {
return URI.file(workspace).toString();
}
}
}
/**
* Main-process lifecycle API exposed to renderers: open/minimize/maximize/
* close/reload windows and push extension settings into a CodeWindow.
*/
class ElectronMainLifeCycleApi implements IElectronMainApiProvider<void> {
eventEmitter: undefined;
constructor(private app: ElectronMainApp) {}
/** Focuses an existing window for the workspace, or opens a new one. */
openWorkspace(workspace: string, openOptions: IWindowOpenOptions) {
if (workspace) {
for (const window of this.app.getCodeWindows()) {
if (window.workspace && window.workspace.toString() === workspace) {
window.getBrowserWindow().show();
return;
}
}
}
this.app.loadWorkspace(workspace, {}, {}, openOptions);
}
minimizeWindow(windowId: number) {
const window = BrowserWindow.fromId(windowId);
if (window) {
window.minimize();
}
}
fullscreenWindow(windowId: number) {
const window = BrowserWindow.fromId(windowId);
if (window) {
window.setFullScreen(true);
}
}
maximizeWindow(windowId: number) {
const window = BrowserWindow.fromId(windowId);
if (window) {
window.maximize();
}
}
unmaximizeWindow(windowId: number) {
const window = BrowserWindow.fromId(windowId);
if (window) {
window.unmaximize();
}
}
/**
* Closes a window. A window that is mid-reload is reloaded instead of
* closed; a normal close hides the window first, then disposes the
* CodeWindow before actually closing.
*/
closeWindow(windowId: number) {
const window = BrowserWindow.fromId(windowId);
if (window) {
const codeWindow = this.app.getCodeWindowByElectronBrowserWindowId(windowId);
if (!codeWindow) {
window.close();
return;
}
if (codeWindow.isReloading) {
codeWindow.isReloading = false;
if (!codeWindow.isRemote) {
// When reloading there is no need to wait for startNode to finish,
// so startNode and the front-end reload can run concurrently.
codeWindow.startNode();
}
window.webContents.reload();
} else {
// A normal close must clean up child processes, which can take a while.
// Hide the window first so the close feels faster to the user.
window.hide();
codeWindow.clear().finally(() => {
window.close();
});
}
}
}
reloadWindow(windowId: number) {
const codeWindow = this.app.getCodeWindowByElectronBrowserWindowId(windowId);
if (codeWindow) {
codeWindow.reload();
}
}
setExtensionDir(extensionDir: string, windowId: number) {
const window = BrowserWindow.fromId(windowId);
if (window) {
const codeWindow = this.app.getCodeWindowByElectronBrowserWindowId(windowId);
if (codeWindow) {
codeWindow.setExtensionDir(extensionDir);
}
}
}
setExtensionCandidate(candidate: ExtensionCandidate[], windowId: number) {
const window = BrowserWindow.fromId(windowId);
if (window) {
const codeWindow = this.app.getCodeWindowByElectronBrowserWindowId(windowId);
if (codeWindow) {
codeWindow.setExtensionCandidate(candidate);
}
}
}
}
import { Builder } from '../lib';
import { Prop } from '../constants';
import type { SectionElementBuilder, Undefinable } from '../types';
import type { OptionBuilder } from '../bits/option';
export abstract class Accessory extends Builder {
  /**
   * @description Sets the single accessory element rendered alongside a section block.
   *
   * Slack rules: maximum of 1 element; any section-compatible element is allowed.
   *
   * {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
   * {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
   */
  public accessory(element: Undefinable<SectionElementBuilder>): this {
    return this.set(element, Prop.Accessory);
  }
}
export abstract class ActionId extends Builder {
  /**
   * @description Sets the identifier for the user's action, echoed back to your app in interaction payloads and view submissions.
   *
   * Slack rules: **required**; must be unique per element within a view or message; maximum of 255 characters.
   *
   * {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
   * {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
   */
  public actionId(actionId: Undefinable<string>): this {
    return this.set(actionId, Prop.ActionId);
  }
}
export abstract class AltText extends Builder {
  /**
   * @description Sets the plain-text summary of the image element or block.
   *
   * Slack rules: **required**; no markup allowed; maximum of 2000 characters.
   *
   * {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
   * {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
   */
  public altText(altText: Undefinable<string>): this {
    return this.set(altText, Prop.AltText);
  }
}
export abstract class BlockId extends Builder {
  /**
   * @description Sets the identifier for a block, sent back to your app in interaction payloads and view submissions.
   *
   * Slack rules: maximum of 255 characters; must be unique per block within a view or message; should be refreshed whenever the block's contents change.
   *
   * {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
   * {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
   */
  public blockId(blockId: Undefinable<string>): this {
    return this.set(blockId, Prop.BlockId);
  }
}
export abstract class CallbackId extends Builder {
  /**
   * @description Sets an identifier for the view, returned in all interaction payloads and view submissions so your server can tell which view produced them.
   *
   * Slack rules: maximum of 255 characters; avoid storing sensitive data here — use `privateMetaData()` instead.
   *
   * {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
   * {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
   */
  public callbackId(callbackId: Undefinable<string>): this {
    return this.set(callbackId, Prop.CallbackId);
  }
}
export abstract class Channel extends Builder {
  /**
   * @description Sets the ID of the Slack channel the message will be delivered to via the API.
   *
   * {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
   * {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
   */
  public channel(channel: Undefinable<string>): this {
    return this.set(channel, Prop.Channel);
  }
}
export abstract class Close extends Builder {
  /**
   * @description Sets the label of the button that dismisses the view.
   *
   * Slack rules: maximum of 24 characters.
   *
   * {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
   * {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
   */
  public close(closeButtonText: Undefinable<string>): this {
    return this.set(closeButtonText, Prop.Close);
  }
}
export abstract class Color extends Builder {
  /**
   * @description Sets the color of the blockquote-style border shown to the left of the attachment.
   *
   * {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
   * {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
   */
  public color(color: Undefinable<string>): this {
    return this.set(color, Prop.Color);
  }
}
export abstract class Confirm<T> extends Builder {
  /**
   * @description On confirmation dialogs, sets the text of the confirming button; on elements, attaches a confirmation dialog shown when the user interacts with the element.
   *
   * {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
   * {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
   */
  public confirm(value: Undefinable<T>): this {
    return this.set(value, Prop.Confirm);
  }
}
export abstract class Deny extends Builder {
  /**
   * @description Sets the label of the button that cancels the action for which the confirmation dialog was shown.
   *
   * Slack rules: **required**; maximum of 30 characters.
   *
   * {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
   * {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
   */
  public deny(text: Undefinable<string>): this {
    return this.set(text, Prop.Deny);
  }
}
export abstract class Description extends Builder {
/**
* @description Sets the descriptive text displayed below the text field of the option.
*
* **Slack Validation Rules and Tips:**
* * Maximum of 75 characters.
*
* {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
* {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
*/
public description(description: Undefinable<string>): this {
return this.set(description, Prop.Description);
}
}
export abstract class Element<T> extends Builder {
/**
* @description Adds a user input element to the input block for gathering information from the user.
*
* **Slack Validation Rules and Tips:**
* * **Required** ⚠
* * Maximum of 1 element.
* * Supports text inputs, select and multi-select menus, as well as date pickers and checkbox inputs.
*
* {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
* {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
*/
public element(element: Undefinable<T>): this {
return this.set(element, Prop.Element);
}
}
export abstract class ExternalId extends Builder {
/**
* @description Sets a custom identifier for a view or file that must be unique for all views on a per-team basis.
*
* **Slack Validation Rules and Tips:**
* * Maximum of 255 characters.
* * When used, an external ID must be unique to a certain view.
*
* {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
* {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
*/
public externalId(externalId: Undefinable<string>): this {
return this.set(externalId, Prop.ExternalId);
}
}
export abstract class Fallback extends Builder {
/**
* @description Sets the plain text summary of the attachment used in clients that can't display formatted text (eg. IRC, mobile notifications).
*
* {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
* {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
*/
public fallback(text: Undefinable<string>): this {
return this.set(text, Prop.Fallback);
}
}
export abstract class Hint extends Builder {
/**
* @description Adds a hint below the input block to provide users with more context or instructions for using the input.
*
* **Slack Validation Rules and Tips:**
* * Maximum of 2000 characters.
*
* {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
* {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
*/
public hint(hint: Undefinable<string>): this {
return this.set(hint, Prop.Hint);
}
}
export abstract class ImageUrl extends Builder {
/**
* @description Sets the source URL for the image block or element that you wish to include in your view or message.
*
* **Slack Validation Rules and Tips:**
* * **Required** ⚠
* * Maximum of 2000 characters.
*
* {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
* {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
*/
public imageUrl(url: Undefinable<string>): this {
return this.set(url, Prop.ImageUrl);
}
}
export abstract class InitialChannel extends Builder {
/**
* @description Pre-populates the menu with a selected, default channel.
*
* {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
* {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
*/
public initialChannel(channelId: Undefinable<string>): this {
return this.set(channelId, Prop.InitialChannel);
}
}
export abstract class InitialConversation extends Builder {
/**
* @description Pre-populates the menu with a selected, default conversation.
*
* {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
* {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
*/
public initialConversation(conversationId: Undefinable<string>): this {
return this.set(conversationId, Prop.InitialConversation);
}
}
export abstract class InitialDate extends Builder {
/**
* @description Pre-populates the date picker with a selected, default date.
*
* {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
* {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
*/
public initialDate(date: Undefinable<Date>): this {
return this.set(date, Prop.InitialDate);
}
}
export abstract class InitialOption extends Builder {
/**
* @description Pre-populates the menu or date picker with a selected, default option.
*
* **Slack Validation Rules and Tips:**
* * Must be an exact match for one of the options in the menu.
*
* {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
* {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
*/
public initialOption(option: Undefinable<OptionBuilder>): this {
return this.set(option, Prop.InitialOption);
}
}
export abstract class InitialTime extends Builder {
/**
* @description Pre-populates the time picker with a selected, default time.
*
* **Slack Validation Rules and Tips:**
* * Set in HH:mm format, where HH is 24-hour hour format and mm is minutes with a leading zero.
*
* {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
* {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
*/
public initialTime(time: Undefinable<string>): this {
return this.set(time, Prop.InitialTime);
}
}
export abstract class InitialUser extends Builder {
/**
* @description Pre-populates the menu with a selected, default user.
*
* {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
* {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
*/
public initialUser(userId: Undefinable<string>): this {
return this.set(userId, Prop.InitialUser);
}
}
export abstract class InitialValue extends Builder {
/**
* @description Pre-populates the text input with default, editable text.
*
* {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
* {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
*/
public initialValue(value: Undefinable<string>): this {
return this.set(value, Prop.InitialValue);
}
}
export abstract class Label extends Builder {
/**
* @description For input blocks, this sets the text displayed in the label for the input. For an option group, it defines the text displayed as a title for the group.
*
* **Slack Validation Rules and Tips:**
* * **Required** ⚠
* * For input blocks, maximum of 2000 characters.
* * For option groups, maximum of 75 characters.
*
* {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
* {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
*/
public label(label: Undefinable<string>): this {
return this.set(label, Prop.Label);
}
}
export abstract class MaxLength extends Builder {
/**
* @description Sets a maximum character count allowed in the given text input.
*
* {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
* {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
*/
public maxLength(length: Undefinable<number>): this {
return this.set(length, Prop.MaxLength);
}
}
export abstract class MaxSelectedItems extends Builder {
/**
* @description Sets a limit to how many items the user can select in the multi-select menu.
*
* {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
* {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
*/
public maxSelectedItems(quantity: Undefinable<number>): this {
return this.set(quantity, Prop.MaxSelectedItems);
}
}
export abstract class MinQueryLength extends Builder {
/**
* @description Sets a minimum number of characters required before Slack queries your app for a list of options.
*
* **Slack Validation Rules and Tips:**
* * If not set, the request will be sent on every character entered or removed.
*
* {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
* {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
*/
public minQueryLength(length: Undefinable<number>): this {
return this.set(length, Prop.MinQueryLength);
}
}
export abstract class MinLength extends Builder {
/**
* @description Sets a minimum character count required for the given text input before the user can submit the view.
*
* **Slack Validation Rules and Tips:**
* * Maximum 3000 characters.
*
* {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
* {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
*/
public minLength(length: Undefinable<number>): this {
return this.set(length, Prop.MinLength);
}
}
export abstract class Placeholder extends Builder {
/**
* @description Defines the text displayed as a placeholder in the empty input element.
*
* **Slack Validation Rules and Tips:**
* * Maximum of 150 characters.
*
* {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
* {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
*/
public placeholder(placeholder: Undefinable<string>): this {
return this.set(placeholder, Prop.Placeholder);
}
}
export abstract class PostAt extends Builder {
/**
* @description Sets a time in the future for the message to be sent to the channel or user, as a scheduled message.
*
* {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
* {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
*/
public postAt(timestamp: Undefinable<string>): this {
return this.set(timestamp, Prop.PostAt);
}
}
export abstract class PrivateMetaData extends Builder {
/**
* @description Defines a string sent back to your server with view and interaction payloads.
*
* **Slack Validation Rules and Tips:**
* * Maximum 3000 characters.
* * Typically used to persist data or store context between views.
*
* {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
* {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
*/
public privateMetaData(privateMetaData: Undefinable<string>): this {
return this.set(privateMetaData, Prop.PrivateMetaData);
}
}
export abstract class Submit extends Builder {
/**
* @description Sets the text displayed on the button that submits the view.
*
* **Slack Validation Rules and Tips:**
* * Maximum of 24 characters.
*
* {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
* {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
*/
public submit(text: Undefinable<string>): this {
return this.set(text, Prop.Submit);
}
}
export abstract class Text extends Builder {
/**
* @description Sets the text displayed for buttons, headers, confirmation dialogs, sections, context blocks, and options.
*
* **Slack Validation Rules and Tips:**
* * **Required for buttons, headers, confirmation dialogs, and option** ⚠
* * **Required for sections if fields are not defined** ⚠
* * For buttons and options, maximum of 75 characters.
* * For confirmation dialogs, maximum of 75 characters.
* * For section and header blocks, maximum of 3000 characters.
*
* {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
* {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
*/
public text(text: Undefinable<string>): this {
return this.set(text, Prop.Text);
}
}
export abstract class Title extends Builder {
/**
* @description Sets the title displayed for the block, element, or confirmation dialog.
*
* **Slack Validation Rules and Tips:**
* * **Required for views and confirmation dialogs** ⚠
* * For views, maximum of 24 characters.
* * For images, maximum of 2000 characters.
* * For confirmation dialogs, maximum of 100 characters.
*
* {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
* {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
*/
public title(title: Undefinable<string>): this {
return this.set(title, Prop.Title);
}
}
export abstract class ThreadTs extends Builder {
/**
* @description Instructs the Slack API to send the message to the thread of the message associated with the timestamp.
*
* {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
* {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
*/
public threadTs(threadTs: Undefinable<string>): this {
return this.set(threadTs, Prop.ThreadTs);
}
}
export abstract class Ts extends Builder {
/**
* @description Instructs the Slack API to use the message to replaced an existing message.
*
* {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
* {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
*/
public ts(ts: Undefinable<string>): this {
return this.set(ts, Prop.Ts);
}
}
export abstract class Url extends Builder {
/**
* @description Sets the URL to which the user is redirected when interacting with a button or option.
*
* **Slack Validation Rules and Tips:**
* * Maximum of 3000 characters.
* * For options, it is only supported for options in an overflow menu.
*
* {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
* {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
*/
public url(url: Undefinable<string>): this {
return this.set(url, Prop.Url);
}
}
export abstract class Value extends Builder {
/**
* @description Sets a value to be sent to your app when a user interacts with a button or option.
*
* **Slack Validation Rules and Tips:**
* * For buttons, maximum of 2000 characters.
* * For options, maximum of 75 characters.
*
* {@link https://api.slack.com/block-kit|Open Official Slack Block Kit Documentation}
* {@link https://www.blockbuilder.dev|Open Block Builder Documentation}
*/
public value(value: Undefinable<string>): this {
return this.set(value, Prop.Value);
}
} | the_stack |
import {
Texture,
Container,
Graphics,
InteractionData,
InteractionEvent,
Loader,
Sprite,
DisplayObject,
} from 'pixi.js';
import { ObjectView } from './ObjectView';
import { TileView } from './TileView';
import { MoveEngine, IMovable, ITweenTarget } from './MoveEngine';
import { GridNode } from '../pathFinding/GridNode';
import { PathFinding } from '../pathFinding/PathFinding';
import { existy, getDist, isInPolygon, mathMap } from '../utils/calculations';
import { KEY_EMPTY_TILE, KEY_NO_OBJECTS, PF_ALGORITHMS } from '../utils/constants';
import { trace } from '../utils/trace';
import {
TColumnRowPair,
getDirBetween,
TMapData,
IMapDataObject,
MapDataObjectVisual,
ObjectInfoInteractionOffsets,
ObjectInfoTextureNames,
ObjectVisualKey,
TPositionPair,
} from '../utils/map';
/**
 * Type declaration for position frame setting.
 * Describes the rectangular screen area (in pixels) the engine is positioned and masked into.
 */
export type TPositionFrame = {
    /** x position of the frame (left edge, px) */
    x: number;
    /** y position of the frame (top edge, px) */
    y: number;
    /** width of the frame (px) */
    w: number;
    /** height of the frame (px) */
    h: number;
};
/**
 * Type declaration for engine-specific configuration.
 * All properties except `mapDataPath` are optional; the EngineView constructor fills in defaults.
 */
export type TEngineConfiguration = {
    /**
     * minimum scale that the PIXI.Container for the map can get, default 0.5
     * @default 0.5
     */
    minScale?: number;
    /**
     * maximum scale that the PIXI.Container for the map can get, default 1.5
     * @default 1.5
     */
    maxScale?: number;
    /** minimum zoom level, engine defined */
    minZoom?: number;
    /** maximum zoom level, engine defined */
    maxZoom?: number;
    /**
     * zoom increment amount calculated by the engine according to user settings, default 0.5
     * @default 0.5
     */
    zoomIncrement?: number;
    /**
     * used to calculate zoom increment, defined by user, default 5
     * @default 5
     */
    numberOfZoomLevels?: number;
    /**
     * initial zoom level of the map, default 0
     * @default 0
     */
    initialZoomLevel?: number;
    /**
     * specifies whether to zoom instantly or with a tween animation, default false
     * @default false
     */
    instantCameraZoom?: boolean;
    /**
     * height of a single isometric tile, default 74
     * @default 74
     */
    tileHeight?: number;
    /**
     * the angle between the top-left edge and the horizontal diagonal of an isometric quad, default 30
     * @default 30
     */
    isoAngle?: number;
    /**
     * frame to position the engine, default `{ x : 0, y : 0, w : 800, h : 600 }`
     * @default { x : 0, y : 0, w : 800, h : 600 }
     */
    initialPositionFrame?: TPositionFrame;
    /**
     * the type of path finding algorithm to use, default `TRAVISO.PF_ALGORITHMS.ASTAR_ORTHOGONAL`
     * @default TRAVISO.PF_ALGORITHMS.ASTAR_ORTHOGONAL
     */
    pathFindingType?: number;
    /**
     * whether to return the path to the closest node if the target is unreachable, default false
     * @default false
     */
    pathFindingClosest?: boolean;
    /**
     * defines if the camera will follow the current controllable or not, default true
     * @default true
     */
    followCharacter?: boolean;
    /**
     * specifies whether the camera moves instantly or with a tween animation to the target location, default false
     * @default false
     */
    instantCameraRelocation?: boolean;
    /**
     * specifies whether the map-objects will be moved to target location instantly or with an animation, default false
     * @default false
     */
    instantObjectRelocation?: boolean;
    /**
     * Make objects transparent when the controllable is behind them, default true
     * @default true
     */
    changeTransparencies?: boolean;
    /**
     * Highlight the path when the current controllable moves on the map, default true
     * @default true
     */
    highlightPath?: boolean;
    /**
     * Highlight the target tile when the current controllable moves on the map, default true
     * @default true
     */
    highlightTargetTile?: boolean;
    /**
     * Animate the tile highlights, default true
     * @default true
     */
    tileHighlightAnimated?: boolean;
    /**
     * Color code for the tile highlight fill (this will be overridden if a highlight-image is defined), default 0x80d7ff
     * @default 0x80d7ff
     */
    tileHighlightFillColor?: number;
    /**
     * Alpha value for the tile highlight fill (this will be overridden if a highlight-image is defined), default 0.5
     * @default 0.5
     */
    tileHighlightFillAlpha?: number;
    /**
     * Color code for the tile highlight stroke (this will be overridden if a highlight-image is defined), default 0xFFFFFF
     * @default 0xFFFFFF
     */
    tileHighlightStrokeColor?: number;
    /**
     * Alpha value for the tile highlight stroke (this will be overridden if a highlight-image is defined), default 1.0
     * @default 1.0
     */
    tileHighlightStrokeAlpha?: number;
    /**
     * When a tile selected don't move the controllable immediately but still call 'tileSelectCallback', default false
     * @default false
     */
    dontAutoMoveToTile?: boolean;
    /**
     * Looks for a path every time an object moves to a new tile (set to false if you don't have other moving objects on your map), default true
     * @default true
     */
    checkPathOnEachTile?: boolean;
    /**
     * Enable dragging the map with touch-and-touchmove or mousedown-and-mousemove on the map, default true
     * @default true
     */
    mapDraggable?: boolean;
    /**
     * Background color, if defined the engine will create a solid colored background for the map, default null
     * @default null
     */
    backgroundColor?: number;
    /**
     * Creates a mask using the position frame defined by 'initialPositionFrame' property or the 'posFrame' parameter that is passed to 'repositionContent' method, default false
     * @default false
     */
    useMask?: boolean;
    /** The path to the json file that defines map data, required */
    mapDataPath: string;
    /**
     * Array of paths to the assets that are desired to be loaded by traviso, no need to use if assets are already loaded to PIXI cache, default null
     * @default null
     */
    assetsToLoad?: string[];
    /**
     * Callback function that will be called once everything is loaded and engine instance is ready, default null
     * @default null
     */
    engineInstanceReadyCallback?: (engineInstance: EngineView) => unknown;
    /**
     * Callback function that will be called when a tile is selected (call params will be the row and column indexes of the tile selected), default null
     * @default null
     */
    tileSelectCallback?: (r: number, c: number) => unknown;
    /**
     * Callback function that will be called when a tile with an interactive map-object on it is selected (call param will be the object selected), default null
     * @default null
     */
    objectSelectCallback?: (objectView: ObjectView) => unknown;
    /**
     * Callback function that will be called when any moving object reaches its destination (call param will be the moving object itself), default null
     * @default null
     */
    objectReachedDestinationCallback?: (objectView: ObjectView) => unknown;
    /**
     * Callback function that will be called when any moving object is in move and there are other objects on the next tile, default null
     * @default null
     */
    otherObjectsOnTheNextTileCallback?: (objectView: ObjectView, otherObjectViews: ObjectView[]) => unknown;
    /**
     * Callback function that will be called every time an objects direction or position changed, default null
     * @default null
     */
    objectUpdateCallback?: (objectView: ObjectView) => unknown;
};
/**
* Main PIXI.Container class to hold all views within the engine and all map related logic.
*
* @class EngineView
* @extends PIXI.Container
*/
export class EngineView extends Container {
/**
 * Internal property holding the engine configuration.
 *
 * @property
 * @private
 * @internal
 */
private _config: TEngineConfiguration;
/**
 * The default height of a single isometric tile
 *
 * @default 74
 * @property
 * @private
 * @static
 */
private static readonly DEFAULT_TILE_H: number = 74;
/**
 * The default angle (in degrees) between the top-left edge and the horizontal diagonal of an isometric quad
 *
 * @default 30
 * @property
 * @private
 * @static
 */
private static readonly DEFAULT_ISO_ANGLE: number = 30;
/**
 * Half-height of a single isometric tile
 *
 * @default 37
 * @property
 * @public
 */
public readonly tileHalfHeight: number;
/**
 * Half-width of a single isometric tile
 *
 * @default 64
 * @property
 * @public
 */
public readonly tileHalfWidth: number;
/**
 * Variable holding the parsed and processed map data
 *
 * @property
 * @public
 */
public mapData: TMapData;
/**
 * MoveEngine instance to handle all animations and tweens
 *
 * @property
 * @public
 */
public moveEngine: MoveEngine;
/**
 * Current scale of the map's display object
 *
 * @property
 * @private
 * @internal
 */
private _currentScale: number;
/**
 * Current zoom amount of the map
 *
 * @property
 * @private
 * @internal
 */
private _currentZoom: number;
/**
 * Active position frame for the engine.
 *
 * @property
 * @private
 * @internal
 */
private _posFrame: TPositionFrame = { x: 0, y: 0, w: 800, h: 600 };
/**
 * Active external center point for the engine.
 * Initialized to the center of the position frame in onAllAssetsLoaded.
 *
 * @property
 * @private
 * @internal
 */
private _externalCenter: TPositionPair;
/**
 * Solid colored background
 *
 * @property
 * @private
 * @internal
 */
private _bg: Graphics;
/**
 * Mask graphics for the mask
 *
 * @property
 * @private
 * @internal
 */
private _mapMask: Graphics;
/**
 * Display object for the map visuals
 *
 * @property
 * @private
 * @internal
 */
private _mapContainer: Container;
/**
 * Display object for the ground/terrain visuals
 *
 * @property
 * @private
 * @internal
 */
private _groundContainer: Container;
/**
 * Display object for the map-object visuals
 *
 * @property
 * @private
 * @internal
 */
private _objContainer: Container;
/**
 * Number of rows in the isometric map
 *
 * @property
 * @private
 * @internal
 */
private _mapSizeR: number;
/**
 * Number of columns in the isometric map
 *
 * @property
 * @private
 * @internal
 */
private _mapSizeC: number;
/**
 * Array to hold map-tiles
 *
 * @property
 * @private
 * @internal
 */
private _tileArray: TileView[][];
/**
 * Array to hold map-objects
 *
 * @property
 * @private
 * @internal
 */
private _objArray: ObjectView[][][];
/**
 * PathFinding instance to handle all path finding logic
 *
 * @property
 * @private
 * @internal
 */
private _pathFinding: PathFinding;
/**
 * Current controllable map-object that will be the default object to move in user interactions
 *
 * @property
 * @private
 * @internal
 */
private _currentControllable: ObjectView;
/**
 * Vertices of the map
 *
 * @property
 * @private
 * @internal
 */
private _mapVertices: number[][];
/**
 * Total width of all ground tiles
 *
 * @property
 * @private
 * @internal
 */
private _mapVisualWidthReal: number;
/**
 * Total height of all ground tiles
 *
 * @property
 * @private
 * @internal
 */
private _mapVisualHeightReal: number;
/**
 * Column/row location the engine is currently focused on
 * (NOTE(review): inferred from the name and type — confirm against usage).
 *
 * @property
 * @private
 * @internal
 */
private _currentFocusLocation: TColumnRowPair;
/**
 * Map visual width after applying the current scale
 * (NOTE(review): inferred from the name — confirm against usage).
 *
 * @property
 * @private
 * @internal
 */
private _mapVisualWidthScaled: number;
/**
 * Whether a map-drag interaction is currently in progress.
 *
 * @default `false`
 * @property
 * @private
 * @internal
 */
private _dragging: boolean = false;
/**
 * Pointer x position captured when the current drag started (assumed; confirm in onMouseDown).
 *
 * @property
 * @private
 * @internal
 */
private _dragInitStartingX: number;
/**
 * Pointer y position captured when the current drag started (assumed; confirm in onMouseDown).
 *
 * @property
 * @private
 * @internal
 */
private _dragInitStartingY: number;
/**
 * Pointer x position from the previous drag update (assumed; confirm in onMouseMove).
 *
 * @property
 * @private
 * @internal
 */
private _dragPrevStartingX: number;
/**
 * Pointer y position from the previous drag update (assumed; confirm in onMouseMove).
 *
 * @property
 * @private
 * @internal
 */
private _dragPrevStartingY: number;
/**
 * Bound reference to onMouseUp, created once in the constructor so the same
 * function instance is available for both adding and removing the listener.
 *
 * @property
 * @private
 * @internal
 */
private onMouseUp_binded: (event: InteractionEvent) => void;
/**
 * Bound reference to onMouseDown, created once in the constructor so the same
 * function instance is available for both adding and removing the listener.
 *
 * @property
 * @private
 * @internal
 */
private onMouseDown_binded: (event: InteractionEvent) => void;
/**
 * Bound reference to onMouseMove, created once in the constructor so the same
 * function instance is available for both adding and removing the listener.
 *
 * @property
 * @private
 * @internal
 */
private onMouseMove_binded: (event: InteractionEvent) => void;
/**
 * Constructor method for the main PIXI.Container class to hold all views within the engine and all map related logic.
 *
 * Binds the pointer handlers once (so the same references can later be removed),
 * normalizes the user configuration by filling in engine defaults, derives the
 * tile half-dimensions from tile height and iso angle, and kicks off asset loading.
 *
 * @constructor
 * @param config {TEngineConfiguration} configuration object for the isometric engine instance
 */
constructor(config: TEngineConfiguration) {
    super();
    // Bind pointer handlers once so identical references exist for add/remove.
    this.onMouseUp_binded = this.onMouseUp.bind(this);
    this.onMouseDown_binded = this.onMouseDown.bind(this);
    this.onMouseMove_binded = this.onMouseMove.bind(this);
    this._config = config;
    // Helper: keep the user's value when it exists, otherwise fall back to the engine default.
    const orDefault = <T>(value: T | undefined, defaultValue: T): T =>
        existy(value) ? (value as T) : defaultValue;
    this._config.followCharacter = orDefault(this._config.followCharacter, true);
    this._config.changeTransparencies = orDefault(this._config.changeTransparencies, true);
    this._config.highlightPath = orDefault(this._config.highlightPath, true);
    this._config.highlightTargetTile = orDefault(this._config.highlightTargetTile, true);
    this._config.tileHighlightAnimated = orDefault(this._config.tileHighlightAnimated, true);
    this._config.tileHighlightFillColor = orDefault(this._config.tileHighlightFillColor, 0x80d7ff);
    this._config.tileHighlightFillAlpha = orDefault(this._config.tileHighlightFillAlpha, 0.5);
    this._config.tileHighlightStrokeColor = orDefault(this._config.tileHighlightStrokeColor, 0xffffff);
    this._config.tileHighlightStrokeAlpha = orDefault(this._config.tileHighlightStrokeAlpha, 1.0);
    this._config.dontAutoMoveToTile = orDefault(this._config.dontAutoMoveToTile, false);
    this._config.checkPathOnEachTile = orDefault(this._config.checkPathOnEachTile, true);
    this._config.mapDraggable = orDefault(this._config.mapDraggable, true);
    this._config.isoAngle = orDefault(this._config.isoAngle, EngineView.DEFAULT_ISO_ANGLE);
    this._config.tileHeight = orDefault(this._config.tileHeight, EngineView.DEFAULT_TILE_H);
    this.setZoomParameters(
        this._config.minScale,
        this._config.maxScale,
        this._config.numberOfZoomLevels,
        this._config.initialZoomLevel,
        this._config.instantCameraZoom
    );
    // Derive tile half-dimensions: half-width follows from half-height and the iso angle.
    this.tileHalfHeight = this._config.tileHeight / 2;
    this.tileHalfWidth = this.tileHalfHeight * Math.tan(((90 - this._config.isoAngle) * Math.PI) / 180);
    this.loadAssetsAndData();
}
/**
 * Handles loading of necessary assets and map data for the given engine instance.
 *
 * Validates the map-data path (must be present and point to a `.json` file),
 * then queues the map data plus any user-listed assets into a PIXI Loader and
 * starts loading; `assetsAndDataLoaded` is invoked when loading completes.
 *
 * @method
 * @function
 * @private
 * @internal
 */
private loadAssetsAndData(): void {
    // Guard: a map-data file is mandatory.
    if (!this._config.mapDataPath) {
        throw new Error(
            "TRAVISO: No JSON-file path defined for map data. Please check your configuration object that you pass to the 'getEngineInstance' method."
        );
    }
    // Guard: only json map-data files are supported.
    const extension = this._config.mapDataPath.split('.').pop();
    if (extension !== 'json') {
        throw new Error('TRAVISO: Invalid map-data file path. This file has to be a json file.');
    }
    const loader = new Loader();
    loader.add('mapData', this._config.mapDataPath);
    const assets = this._config.assetsToLoad;
    if (assets && assets.length > 0) {
        loader.add(assets);
    }
    loader.load(this.assetsAndDataLoaded.bind(this));
}
/**
 * Handles loading of map data for the given engine instance.
 *
 * Validates and normalizes the raw map-data JSON in place: sanitizes optional
 * settings, expands the comma-separated ground/object map rows into 2D arrays,
 * resolves each object's texture names and interaction offsets, and finally
 * stores the result on `this.mapData` before initializing the engine.
 *
 * @method
 * @function
 * @private
 * @internal
 *
 * @param loader {Loader} PIXI's loader instance
 */
private assetsAndDataLoaded(loader: Loader): void {
// console.log('assetsAndDataLoaded', resources.mapData.data);
const mapData: TMapData = loader.resources.mapData.data as TMapData;
// initial controls: warn (but continue) on a missing or malformed start location
if (!existy(mapData.initialControllableLocation)) {
trace(
"Map-data file warning: No 'initialControllableLocation' defined. Ignore this warning if you are adding it later manually."
);
} else if (
!existy(mapData.initialControllableLocation.columnIndex) ||
!existy(mapData.initialControllableLocation.rowIndex)
) {
trace("Map-data file warning: 'initialControllableLocation' exists but it is not defined properly.");
mapData.initialControllableLocation = null;
}
// Optional images are dropped entirely when their 'path' is missing.
if (mapData.tileHighlightImage && !mapData.tileHighlightImage.path) {
trace("Map-data file warning: 'tileHighlightImage' exists but its 'path' is not defined properly.");
mapData.tileHighlightImage = null;
}
if (mapData.singleGroundImage && !mapData.singleGroundImage.path) {
trace("Map-data file warning: 'singleGroundImage' exists but its 'path' is not defined properly.");
mapData.singleGroundImage = null;
}
let i: number, j: number, arr: string[];
// Expand the ground map: each row is a comma-separated string; whitespace is
// stripped and empty cells are replaced with the empty-tile key.
let rows = mapData.groundMap;
mapData.groundMapData = [];
for (i = 0; i < rows.length; i++) {
arr = String(rows[i].row).replace(/\s/g, '').split(',');
// remove empty spaces in a row and cast to an array
for (j = arr.length; j--; ) {
arr[j] = arr[j] || KEY_EMPTY_TILE;
}
mapData.groundMapData[i] = arr;
}
// Expand the objects map the same way, with the no-objects key as filler.
rows = mapData.objectsMap;
mapData.objectsMapData = [];
for (i = 0; i < rows.length; i++) {
arr = String(rows[i].row).replace(/\s/g, '').split(',');
// remove empty spaces in a row and cast to an array
for (j = arr.length; j--; ) {
arr[j] = arr[j] || KEY_NO_OBJECTS;
}
mapData.objectsMapData[i] = arr;
}
// Missing tile/object dictionaries are tolerated as empty.
if (!existy(mapData.tiles)) {
trace("Map-data file warning: No 'tiles' defined.");
mapData.tiles = {};
}
if (!existy(mapData.objects)) {
trace("Map-data file warning: No 'objects' defined.");
mapData.objects = {};
}
let obj: IMapDataObject,
objId: string,
visual: MapDataObjectVisual,
visualId: ObjectVisualKey,
interactionOffsets: ObjectInfoInteractionOffsets,
oTextures: ObjectInfoTextureNames,
m: number,
n: number;
// Normalize each object definition: spans default to 1, and every visual is
// resolved into a flat list of texture names plus optional interaction offsets.
for (objId in mapData.objects) {
obj = mapData.objects[objId];
if (!existy(obj.visuals)) {
throw new Error('TRAVISO: No visuals defined in data-file for object type: ' + objId);
}
obj.id = objId;
if (!existy(obj.rowSpan)) {
obj.rowSpan = 1;
}
if (!existy(obj.columnSpan)) {
obj.columnSpan = 1;
}
oTextures = {};
interactionOffsets = {};
for (visualId in obj.visuals) {
visual = obj.visuals[visualId];
// Interaction offset is only recorded when both row and column parts exist.
if (existy(visual.ipor) && existy(visual.ipoc)) {
interactionOffsets[visualId] = {
c: Number(visual.ipoc),
r: Number(visual.ipor),
};
}
// visual = (visual as MapDataObjectVisualType1);
if (visual.frames && visual.frames.length > 0) {
// Explicit frame list: take each frame's path as-is.
oTextures[visualId] = [];
for (m = 0; m < visual.frames.length; m++) {
oTextures[visualId][m] = visual.frames[m].path;
}
} else {
// Generated frame list: requires path, extension and a positive frame count.
if (!visual.path || !visual.extension || !visual.numberOfFrames || visual.numberOfFrames < 1) {
throw new Error(
'TRAVISO: Invalid or missing visual attributes detected in data-file for visual with id: ' +
visualId
);
}
oTextures[visualId] = [];
if (visual.numberOfFrames === 1) {
oTextures[visualId][0] = visual.path + '.' + visual.extension;
} else {
n = 0;
// NOTE(review): with startIndex > 0 this produces numberOfFrames - startIndex
// textures — confirm 'numberOfFrames' is meant as an exclusive end index
// rather than a frame count.
for (m = visual.startIndex; m < visual.numberOfFrames; m++) {
oTextures[visualId][n++] = visual.path + String(m) + '.' + visual.extension;
}
}
}
}
// Cache the resolved shorthand flags/lookups on the object definition.
obj.textureNames = oTextures;
obj.io = interactionOffsets;
obj.f = !!obj.floor;
obj.nt = !!obj.noTransparency;
obj.m = !!obj.movable;
obj.i = !!obj.interactive;
}
// Raw string maps are no longer needed once the 2D arrays exist.
delete mapData.objectsMap;
delete mapData.groundMap;
this.mapData = mapData;
this.onAllAssetsLoaded();
}
/**
* This method is being called whenever all the assets are
* loaded and engine is ready to initialize.
*
* @method
* @function
* @private
* @internal
*/
private onAllAssetsLoaded(): void {
trace('All assets loaded');
this.moveEngine = new MoveEngine(this);
this._currentScale = 1.0;
this._currentZoom = 0;
this._posFrame = this._config.initialPositionFrame || {
x: 0,
y: 0,
w: 800,
h: 600,
};
this._externalCenter = {
x: this._posFrame.w >> 1,
y: this._posFrame.h >> 1,
};
this.createMap();
this.repositionContent(this._config.initialPositionFrame);
this.enableInteraction();
if (this._config.engineInstanceReadyCallback) {
this._config.engineInstanceReadyCallback(this);
}
}
    /**
     * Creates the map and setups necessary parameters for future map calculations.
     *
     * Builds the optional background and mask graphics, the ground/object
     * containers, the 2D tile and object arrays, the path-finding grid and the
     * map corner vertices, then applies the initial zoom and camera position.
     *
     * @method
     * @function
     * @private
     * @internal
     */
    private createMap(): void {
        // create background
        if (this._config.backgroundColor) {
            this._bg = new Graphics();
            this.addChild(this._bg);
        }
        // create mask
        if (this._config.useMask) {
            this._mapMask = new Graphics();
            this.addChild(this._mapMask);
        }
        // create containers for visual map elements
        this._mapContainer = new Container();
        this.addChild(this._mapContainer);
        // Define two layers of maps
        // One for the world and one for the objects (static/dynamic) over it
        // This enables us not to update the whole world in every move but instead just update the object depths over it
        this._groundContainer = new Container();
        this._mapContainer.addChild(this._groundContainer);
        this._objContainer = new Container();
        this._mapContainer.addChild(this._objContainer);
        const groundMapData = this.mapData.groundMapData;
        const objectsMapData = this.mapData.objectsMapData;
        const initialControllableLocation = this.mapData.initialControllableLocation;
        // set map size: rows from the outer array, columns from the first row
        // (assumes a rectangular ground map -- TODO confirm data files guarantee this)
        this._mapSizeR = groundMapData.length;
        this._mapSizeC = groundMapData[0].length;
        // add ground image first if it is defined
        let groundImageSprite: Sprite;
        if (this.mapData.singleGroundImage) {
            groundImageSprite = new Sprite(Texture.from(this.mapData.singleGroundImage.path));
            this._groundContainer.addChild(groundImageSprite);
            groundImageSprite.scale.set(this.mapData.singleGroundImage.scale || 1);
        }
        // create arrays to hold tiles and objects
        this._tileArray = [];
        this._objArray = [];
        let i, j;
        for (i = 0; i < this._mapSizeR; i++) {
            this._tileArray[i] = [];
            this._objArray[i] = [];
            for (j = 0; j < this._mapSizeC; j++) {
                this._tileArray[i][j] = null;
                this._objArray[i][j] = null;
            }
        }
        // Map data is being sent to path finding and after this point
        // its content will be different acc to the path-finding algorithm.
        // It is still being stored in engine.mapData but you must be aware
        // of the structure if you want to use it after this point.
        this._pathFinding = new PathFinding(this._mapSizeC, this._mapSizeR, {
            diagonal: this._config.pathFindingType === PF_ALGORITHMS.ASTAR_DIAGONAL,
            closest: this._config.pathFindingClosest,
        });
        // build tiles; empty or non-movable cells are marked as blocked (0) for path-finding
        let tile;
        for (i = 0; i < this._mapSizeR; i++) {
            for (j = this._mapSizeC - 1; j >= 0; j--) {
                // already null from the init loop above; kept as a safety reset
                this._tileArray[i][j] = null;
                if (groundMapData[i][j] && groundMapData[i][j] !== KEY_EMPTY_TILE) {
                    tile = new TileView(this, groundMapData[i][j]);
                    tile.position.x = this.getTilePosXFor(i, j);
                    tile.position.y = this.getTilePosYFor(i, j);
                    tile.mapPos = { c: j, r: i };
                    this._tileArray[i][j] = tile;
                    this._groundContainer.addChild(tile);
                    if (!tile.isMovableTo) {
                        this._pathFinding.setCell(j, i, 0);
                    }
                } else {
                    this._pathFinding.setCell(j, i, 0);
                }
            }
        }
        // build map-objects and detect the initially controllable one
        let obj,
            floorObjectFound = false;
        for (i = 0; i < this._mapSizeR; i++) {
            for (j = this._mapSizeC - 1; j >= 0; j--) {
                this._objArray[i][j] = null;
                if (objectsMapData[i][j] && objectsMapData[i][j] !== KEY_NO_OBJECTS) {
                    obj = new ObjectView(this, objectsMapData[i][j]);
                    obj.position.x = this.getTilePosXFor(i, j);
                    obj.position.y = this.getTilePosYFor(i, j) + this.tileHalfHeight;
                    obj.mapPos = { c: j, r: i };
                    if (!floorObjectFound && obj.isFloorObject) {
                        floorObjectFound = true;
                    }
                    this._objContainer.addChild(obj);
                    this.addObjRefToLocation(obj, obj.mapPos);
                    // if (initialControllableLocation && initialControllableLocation.c === j && initialControllableLocation.r === i)
                    if (
                        initialControllableLocation &&
                        initialControllableLocation.columnIndex === j &&
                        initialControllableLocation.rowIndex === i
                    ) {
                        this._currentControllable = obj;
                    }
                }
            }
        }
        if (floorObjectFound) {
            // run the loop again to bring the other objects on top of the floor objects
            let a, k;
            for (i = 0; i < this._mapSizeR; i++) {
                for (j = this._mapSizeC - 1; j >= 0; j--) {
                    a = this._objArray[i][j];
                    if (a) {
                        for (k = 0; k < a.length; k++) {
                            if (!a[k].isFloorObject) {
                                this._objContainer.addChild(a[k]);
                            }
                        }
                    }
                }
            }
        }
        // cacheAsBitmap: for now this creates problem with tile highlights
        // this._groundContainer.cacheAsBitmap = true;
        // the four corner points of the isometric diamond: top, right, bottom, left
        this._mapVertices = [
            [this.getTilePosXFor(0, 0) - this.tileHalfWidth, this.getTilePosYFor(0, 0)],
            [
                this.getTilePosXFor(0, this._mapSizeC - 1),
                this.getTilePosYFor(0, this._mapSizeC - 1) - this.tileHalfHeight,
            ],
            [
                this.getTilePosXFor(this._mapSizeR - 1, this._mapSizeC - 1) + this.tileHalfWidth,
                this.getTilePosYFor(this._mapSizeR - 1, this._mapSizeC - 1),
            ],
            [
                this.getTilePosXFor(this._mapSizeR - 1, 0),
                this.getTilePosYFor(this._mapSizeR - 1, 0) + this.tileHalfHeight,
            ],
        ];
        // unscaled visual extents of the whole map
        this._mapVisualWidthReal =
            this.getTilePosXFor(this._mapSizeR - 1, this._mapSizeC - 1) - this.getTilePosXFor(0, 0);
        this._mapVisualHeightReal =
            this.getTilePosYFor(this._mapSizeR - 1, 0) - this.getTilePosYFor(0, this._mapSizeC - 1);
        // center the single ground image (if any) inside the map's visual bounds
        if (groundImageSprite) {
            groundImageSprite.position.x =
                this._mapVertices[0][0] + this.tileHalfWidth + (this._mapVisualWidthReal - groundImageSprite.width) / 2;
            groundImageSprite.position.y =
                this._mapVertices[1][1] +
                this.tileHalfHeight +
                (this._mapVisualHeightReal - groundImageSprite.height) / 2;
        }
        this.zoomTo(this._config.initialZoomLevel, true);
        // start the camera at the controllable character when following is enabled,
        // otherwise at the external center of the frame
        if (this._config.followCharacter && initialControllableLocation) {
            // this.centralizeToLocation(initialControllableLocation.c, initialControllableLocation.r, true);
            this.centralizeToLocation(
                initialControllableLocation.columnIndex,
                initialControllableLocation.rowIndex,
                true
            );
        } else {
            this.centralizeToCurrentExternalCenter(true);
        }
    }
/**
* Calculates 2D X position of a tile, given its column and row indices.
*
* @method
* @function
* @public
*
* @param r {number} row index of the tile
* @param c {number} column index of the tile
* @return {number} 2D X position of a tile
*/
public getTilePosXFor(r: number, c: number): number {
return c * this.tileHalfWidth + r * this.tileHalfWidth;
}
/**
* Calculates 2D Y position of a tile, given its column and row indices.
*
* @method
* @function
* @public
*
* @param r {number} row index of the tile
* @param c {number} column index of the tile
* @return {number} 2D Y position of a tile
*/
public getTilePosYFor(r: number, c: number): number {
return r * this.tileHalfHeight - c * this.tileHalfHeight;
}
/**
* Shows or hides the display object that includes the objects-layer
*
* @method
* @function
* @public
*
* @param show {boolean} show the object layer, default `false`
*/
public showHideObjectLayer(show: boolean = false): void {
this._objContainer.visible = show;
}
/**
* Shows or hides the display object that includes the ground/terrain layer
*
* @method
* @function
* @public
*
* @param show {boolean} show the ground layer, default `false`
*/
public showHideGroundLayer(show: boolean = false): void {
this._groundContainer.visible = show;
}
/**
* Returns the TileView instance that sits in the location given by row and column indices.
*
* @method
* @function
* @public
*
* @param r {number} row index of the tile
* @param c {number} column index of the tile
* @return {TileView} the tile in the location given
*/
public getTileAtRowAndColumn(r: number, c: number): TileView {
return this._tileArray[r][c];
}
/**
* Returns all the ObjectView instances referenced to the given location with the specified row and column indices.
*
* @method
* @function
* @public
*
* @param r {number} the row index of the map location
* @param c {number} the column index of the map location
* @return {Array(ObjectView)} an array of map-objects referenced to the given location
*/
public getObjectsAtRowAndColumn(r: number, c: number): ObjectView[] {
return this._objArray[r][c];
}
/**
* Returns all the ObjectView instances referenced to the given location.
*
* @method
* @function
* @public
*
* @param pos {TColumnRowPair} position object including row and column coordinates
* @return {Array(ObjectView)} an array of map-objects referenced to the given location
*/
public getObjectsAtLocation(pos: TColumnRowPair): ObjectView[] {
return this._objArray[pos.r][pos.c];
}
/**
* Creates and adds a predefined (in json file) map-object to the map using the specified object type-id.
*
* @method
* @function
* @public
*
* @param type {number} type-id of the object as defined in the json file
* @param pos {TColumnRowPair} position object including row and column coordinates
* @return {ObjectView} the newly created map-object
*/
public createAndAddObjectToLocation(type: string, pos: TColumnRowPair): ObjectView {
return this.addObjectToLocation(new ObjectView(this, type), pos);
}
/**
* Adds an already-created object to the map.
*
* @method
* @function
* @public
*
* @param obj {ObjectView} a map-object to add to the map and the given location
* @param pos {TColumnRowPair} position object including row and column coordinates
* @return {ObjectView} the newly added object
*/
public addObjectToLocation(obj: ObjectView, pos: TColumnRowPair): ObjectView {
obj.position.x = this.getTilePosXFor(pos.r, pos.c);
obj.position.y = this.getTilePosYFor(pos.r, pos.c) + this.tileHalfHeight;
obj.mapPos = { c: pos.c, r: pos.r };
this._objContainer.addChild(obj);
this.addObjRefToLocation(obj, obj.mapPos);
this.arrangeDepthsFromLocation(obj.isFloorObject ? { c: this._mapSizeC - 1, r: 0 } : obj.mapPos);
return obj;
}
/**
* Enables adding external custom display objects to the specified location.
* This method should be used for the objects that are not already defined in json file and don't have a type-id.
* The resulting object will be independent of engine mechanics apart from depth controls.
*
* @method
* @function
* @public
*
* @param displayObject {PIXI.DisplayObject} object to be added to location
* @param displayObject.isMovableTo {boolean} if the object can be moved onto by other map-objects, default true
* @param displayObject.columnSpan {number} number of tiles that map-object covers horizontally on the isometric map
* @param displayObject.rowSpan {number} number of tiles that map-object covers vertically on the isometric map
* @param pos {TColumnRowPair} position object including row and column coordinates
* @return {PIXI.DisplayObject} the newly added object
*/
public addCustomObjectToLocation(displayObject: ObjectView, pos: TColumnRowPair): DisplayObject {
displayObject.isMovableTo = existy(displayObject.isMovableTo) ? displayObject.isMovableTo : true;
displayObject.columnSpan = displayObject.columnSpan || 1;
displayObject.rowSpan = displayObject.rowSpan || 1;
return this.addObjectToLocation(displayObject, pos);
// this.removeObjRefFromLocation(displayObject, pos);
}
/**
* Removes the object and its references from the map.
*
* @method
* @function
* @public
*
* @param obj {ObjectView} Either an external display object or a map-object (ObjectView)
* @param pos {TColumnRowPair} position object including row and column coordinates. If not defined, the engine will use `obj.mapPos` to remove the map-object
*/
public removeObjectFromLocation(obj: ObjectView, pos: TColumnRowPair): void {
pos = pos || obj.mapPos;
this._objContainer.removeChild(obj);
this.removeObjRefFromLocation(obj, pos);
}
/**
* Centralizes and zooms the EngineView instance to the object specified.
*
* @method focusMapToObject
* @param obj {ObjectView} the object that map will be focused with respect to
* @param obj.mapPos {Object} the object that holds the location of the map-object on the map
* @param obj.mapPos.c {number} the column index of the map location
* @param obj.mapPos.r {number} the row index of the map location
* @param obj.columnSpan {number} number of tiles that map-object covers horizontally on the isometric map
* @param obj.rowSpan {number} number of tiles that map-object covers vertically on the isometric map
*/
public focusMapToObject(obj: ObjectView): void {
this.focusMapToLocation(obj.mapPos.c + (obj.columnSpan - 1) / 2, obj.mapPos.r - (obj.rowSpan - 1) / 2, 0);
}
/**
* Centralizes and zooms the EngineView instance to the map location specified by row and column index.
*
* @method focusMapToLocation
* @param c {number} the column index of the map location
* @param r {number} the row index of the map location
* @param zoomAmount {number} targeted zoom level for focusing
*/
public focusMapToLocation(c: number, r: number, zoomAmount: number): void {
// NOTE: using zoomTo instead of setScale causes centralizeToPoint to be called twice (no visual problem)
this.zoomTo(zoomAmount, false);
this.centralizeToLocation(c, r, this._config.instantCameraRelocation);
}
/**
* Centralizes the EngineView instance to the object specified.
*
* @method centralizeToObject
* @param obj {ObjectView} the object that map will be centralized with respect to
* @param obj.mapPos {Object} the object that holds the location of the map-object on the map
* @param obj.mapPos.c {number} the column index of the map location
* @param obj.mapPos.r {number} the row index of the map location
*/
public centralizeToObject(obj: ObjectView): void {
this.centralizeToLocation(obj.mapPos.c, obj.mapPos.r, this._config.instantCameraRelocation);
}
/**
* Centralizes the EngineView instance to the map location specified by row and column index.
*
* @method centralizeToLocation
* @param c {number} the column index of the map location
* @param r {number} the row index of the map location
* @param [instantRelocate=false] {boolean} specifies if the camera move will be animated or instant
*/
public centralizeToLocation(c: number, r: number, instantRelocate: boolean): void {
this._currentFocusLocation = { c: c, r: r };
const px =
this._externalCenter.x + (this._mapVisualWidthScaled >> 1) - this.getTilePosXFor(r, c) * this._currentScale;
const py = this._externalCenter.y - this.getTilePosYFor(r, c) * this._currentScale;
this.centralizeToPoint(px, py, instantRelocate);
}
/**
* Centralizes the EngineView instance to the current location of the attention/focus.
*
* @method centralizeToCurrentFocusLocation
* @param [instantRelocate=false] {boolean} specifies if the camera move will be animated or instant
*/
public centralizeToCurrentFocusLocation(instantRelocate: boolean): void {
this.centralizeToLocation(this._currentFocusLocation.c, this._currentFocusLocation.r, instantRelocate);
}
/**
* External center is the central point of the frame defined by the user to be used as the visual size of the engine.
* This method centralizes the EngineView instance with respect to this external center-point.
*
* @method
* @function
* @public
*
* @param instantRelocate {boolean} specifies if the camera move will be animated or instant
*/
public centralizeToCurrentExternalCenter(instantRelocate: boolean): void {
if (this._externalCenter) {
this._currentFocusLocation = {
c: this._mapSizeC >> 1,
r: this._mapSizeR >> 1,
};
this.centralizeToPoint(this._externalCenter.x, this._externalCenter.y, instantRelocate);
}
}
/**
* Centralizes the EngineView instance to the points specified.
*
* @method
* @function
* @public
*
* @param px {number} the x coordinate of the center point with respect to EngineView frame
* @param py {number} the y coordinate of the center point with respect to EngineView frame
* @param instantRelocate {boolean} specifies if the relocation will be animated or instant
*/
public centralizeToPoint(px: number, py: number, instantRelocate: boolean): void {
if (this._tileArray) {
px = px - (this._mapVisualWidthScaled >> 1);
if (
(existy(instantRelocate) && instantRelocate) ||
(!existy(instantRelocate) && this._config.instantCameraRelocation)
) {
this._mapContainer.position.x = px;
this._mapContainer.position.y = py;
} else {
this.moveEngine.addTween(
this._mapContainer.position as unknown as ITweenTarget,
0.5,
{ x: px, y: py },
0,
'easeInOut',
true
);
}
}
}
/**
* Sets all the parameters related to zooming in and out.
*
* @method
* @function
* @public
*
* @param minScale {number} minimum scale that the PIXI.Container for the map can get, default 0.5
* @param maxScale {number} maximum scale that the PIXI.Container for the map can get, default 1.5
* @param numberOfZoomLevels {number} used to calculate zoom increment, defined by user, default 5
* @param initialZoomLevel {number} initial zoom level of the map, default 0
* @param instantCameraZoom {boolean} specifies whether to zoom instantly or with a tween animation, default false
*/
public setZoomParameters(
minScale: number = 0.5,
maxScale: number = 1.5,
numberOfZoomLevels: number = 5,
initialZoomLevel: number = 0,
instantCameraZoom: boolean = false
): void {
this._config.minScale = minScale;
this._config.maxScale = maxScale;
this._config.minZoom = -1;
this._config.maxZoom = 1;
this._config.zoomIncrement = existy(numberOfZoomLevels)
? numberOfZoomLevels <= 1
? 0
: 2 / (numberOfZoomLevels - 1)
: 0.5;
this._config.initialZoomLevel = initialZoomLevel;
this._config.instantCameraZoom = instantCameraZoom;
}
/**
* Sets map's scale.
*
* @method
* @function
* @private
* @internal
*
* @param s {number} Scale amount for both x and y coordinates
* @param instantZoom {boolean} Specifies if the scaling will be animated or instant
*/
private setScale(s: number, instantZoom: boolean): void {
if (s < this._config.minScale) {
s = this._config.minScale;
} else if (s > this._config.maxScale) {
s = this._config.maxScale;
}
this._currentScale = s;
this._mapVisualWidthScaled = this._mapVisualWidthReal * this._currentScale;
// this.mapVisualHeightScaled = this._mapVisualHeightReal * this._currentScale;
if ((existy(instantZoom) && instantZoom) || (!existy(instantZoom) && this._config.instantCameraZoom)) {
this._mapContainer.scale.set(this._currentScale);
} else {
this.moveEngine.addTween(
this._mapContainer.scale as unknown as ITweenTarget,
0.5,
{ x: this._currentScale, y: this._currentScale },
0,
'easeInOut',
true
);
}
}
/**
* Zooms camera by to the amount given.
*
* @method
* @function
* @public
*
* @param zoomAmount {number} specifies zoom amount (between -1 and 1). Use -1, -0.5, 0, 0,5, 1 for better results.
* @param instantZoom {boolean} specifies whether to zoom instantly or with a tween animation
*/
public zoomTo(zoomAmount: number, instantZoom: boolean): void {
zoomAmount = zoomAmount || 0;
let s = mathMap(
zoomAmount,
this._config.minZoom,
this._config.maxZoom,
this._config.minScale,
this._config.maxScale,
true
);
s = Math.round(s * 10) / 10;
this._currentZoom = mathMap(
s,
this._config.minScale,
this._config.maxScale,
this._config.minZoom,
this._config.maxZoom,
true
);
this._externalCenter = this._externalCenter
? this._externalCenter
: { x: this._mapVisualWidthScaled >> 1, y: 0 };
const diff = {
x: this._mapContainer.position.x + (this._mapVisualWidthScaled >> 1) - this._externalCenter.x,
y: this._mapContainer.position.y - this._externalCenter.y,
};
const oldScale = this._currentScale;
this.setScale(s, instantZoom);
const ratio = this._currentScale / oldScale;
this.centralizeToPoint(
this._externalCenter.x + diff.x * ratio,
this._externalCenter.y + diff.y * ratio,
(existy(instantZoom) && instantZoom) || (!existy(instantZoom) && this._config.instantCameraZoom)
);
// trace("scalingTo: " + this._currentScale);
// trace("zoomingTo: " + this._currentZoom);
}
/**
* Zooms the camera one level out.
*
* @method
* @function
* @public
*
* @param instantZoom {boolean} Specifies whether to zoom instantly or with a tween animation
*/
public zoomOut(instantZoom: boolean): void {
this.zoomTo(this._currentZoom - this._config.zoomIncrement, instantZoom);
}
/**
* Zooms the camera one level in.
*
* @method zoomIn
* @param [instantZoom=false] {boolean} specifies whether to zoom instantly or with a tween animation
*/
public zoomIn(instantZoom: boolean): void {
this.zoomTo(this._currentZoom + this._config.zoomIncrement, instantZoom);
}
/**
* Returns the current controllable map-object.
*
* @method getCurrentControllable
* @return {ObjectView} current controllable map-object
*/
public getCurrentControllable(): ObjectView {
return this._currentControllable;
}
/**
* Sets a map-object as the current controllable. This object will be moving in further relevant user interactions.
*
* @method setCurrentControllable
* @param obj {ObjectView} object to be set as current controllable
* @param obj.mapPos {Object} object including r and c coordinates
* @param obj.mapPos.c {number} the column index of the map location
* @param obj.mapPos.r {number} the row index of the map location
*/
public setCurrentControllable(obj: ObjectView): void {
this._currentControllable = obj;
}
/**
* Adds a reference of the given map-object to the given location in the object array.
* This should be called when an object moved or transferred to the corresponding location.
* Uses objects size property to add its reference to all relevant cells.
*
* @method
* @function
* @private
* @internal
*
* @param obj {ObjectView} object to be bind to location
* @param obj.columnSpan {number} number of tiles that map-object covers horizontally on the isometric map
* @param obj.rowSpan {number} number of tiles that map-object covers vertically on the isometric map
* @param pos {TColumnRowPair} position object including row and column coordinates
*/
private addObjRefToLocation(obj: ObjectView, pos: TColumnRowPair): void {
let k, m;
for (k = pos.c; k < pos.c + obj.columnSpan; k++) {
for (m = pos.r; m > pos.r - obj.rowSpan; m--) {
this.addObjRefToSingleLocation(obj, { c: k, r: m });
}
}
}
/**
* Adds a reference of the given map-object to the given location in the object array.
* Updates the cell as movable or not according to the object being movable onto or not.
*
* @method
* @function
* @private
* @internal
*
* @param obj {ObjectView} object to be bind to location
* @param obj.isMovableTo {boolean} is the object is movable onto by the other objects or not
* @param pos {TColumnRowPair} position object including row and column coordinates
*/
private addObjRefToSingleLocation(obj: ObjectView, pos: TColumnRowPair): void {
if (!this._objArray[pos.r][pos.c]) {
this._objArray[pos.r][pos.c] = [];
}
const index = this._objArray[pos.r][pos.c].indexOf(obj);
if (index < 0) {
this._objArray[pos.r][pos.c].push(obj);
}
if (!obj.isMovableTo) {
this._pathFinding.setDynamicCell(pos.c, pos.r, 0);
}
}
/**
* Removes references of the given map-object from the given location in the object array.
* This should be called when an object moved or transferred from the corresponding location.
* Uses objects size property to remove its references from all relevant cells.
*
* @method
* @function
* @private
* @internal
*
* @param obj {ObjectView} object to be bind to location
* @param obj.columnSpan {number} number of tiles that map-object covers horizontally on the isometric map
* @param obj.rowSpan {number} number of tiles that map-object covers vertically on the isometric map
* @param pos {TColumnRowPair} position object including row and column coordinates
*/
private removeObjRefFromLocation(obj: ObjectView, pos: TColumnRowPair): void {
let k, m;
for (k = pos.c; k < pos.c + obj.columnSpan; k++) {
for (m = pos.r; m > pos.r - obj.rowSpan; m--) {
this.removeObjRefFromSingleLocation(obj, { c: k, r: m });
}
}
}
/**
* Removes a reference of the given map-object from the given location in the object array.
* Updates the cell as movable or not according to the other object references in the same cell.
*
* @method
* @function
* @private
* @internal
*
* @param obj {ObjectView} object to be bind to location
* @param pos {TColumnRowPair} position object including row and column coordinates
*/
private removeObjRefFromSingleLocation(obj: ObjectView, pos: TColumnRowPair): void {
if (this._objArray[pos.r][pos.c]) {
const index = this._objArray[pos.r][pos.c].indexOf(obj);
if (index > -1) {
this._objArray[pos.r][pos.c].splice(index, 1);
}
if (this._objArray[pos.r][pos.c].length === 0) {
this._pathFinding.setDynamicCell(pos.c, pos.r, 1);
this._objArray[pos.r][pos.c] = null;
} else {
const a = this._objArray[pos.r][pos.c];
const l = a.length;
for (let i = 0; i < l; i++) {
if (!a[i].isMovableTo) {
this._pathFinding.setDynamicCell(pos.c, pos.r, 0);
break;
} else if (i === l - 1) {
this._pathFinding.setDynamicCell(pos.c, pos.r, 1);
}
}
}
}
}
// /**
// * Removes all map-object references from the given location in the object array.
// *
// * @private
// * @method removeAllObjectRefsFromLocation
// * @param {TColumnRowPair} pos object including r and c coordinates
// */
// private removeAllObjectRefsFromLocation(pos: TColumnRowPair): void {
// if (this._objArray[pos.r][pos.c]) {
// this._pathFinding.setDynamicCell(pos.c, pos.r, 1);
// this._objArray[pos.r][pos.c] = null;
// }
// }
/**
* Sets alphas of the map-objects referenced to the given location.
*
* @method changeObjAlphasInLocation
* @param value {number} alpha value, should be between 0 and 1
* @param pos {TColumnRowPair} position object including row and column coordinates
*/
public changeObjAlphasInLocation(value: number, pos: TColumnRowPair): void {
const a = this._objArray[pos.r][pos.c];
if (a) {
const l = a.length;
for (let i = 0; i < l; i++) {
if (!a[i].isFloorObject && !a[i].noTransparency) {
a[i].alpha = value;
}
}
}
}
/**
* Sets a map-object's location and logically moves it to the new location.
*
* @method
* @function
* @private
* @internal
*
* @param obj {ObjectView} map-object to be moved
* @param obj.mapPos {Object} object including r and c coordinates
* @param obj.mapPos.c {number} the column index of the map location
* @param obj.mapPos.r {number} the row index of the map location
* @param pos {TColumnRowPair} position object including row and column coordinates
*/
private arrangeObjLocation(obj: ObjectView, pos: TColumnRowPair): void {
this.removeObjRefFromLocation(obj, obj.mapPos);
this.addObjRefToLocation(obj, pos);
obj.mapPos = { c: pos.c, r: pos.r };
}
/**
* Sets occlusion transparencies according to given map-object's location.
* This method only works for user-controllable object.
*
* @method
* @function
* @private
* @internal
*
* @param obj {ObjectView} current controllable map-object
* @param prevPos {TColumnRowPair} previous location of the map-object in terms of row and column coordinates
* @param pos {TColumnRowPair} new location of the map-object in terms of row and column coordinates
*/
private arrangeObjTransparencies(obj: ObjectView, prevPos: TColumnRowPair, pos: TColumnRowPair): void {
if (this._config.changeTransparencies) {
if (this._currentControllable === obj) {
if (prevPos.c > 0) {
this.changeObjAlphasInLocation(1, {
c: prevPos.c - 1,
r: prevPos.r,
});
}
if (prevPos.c > 0 && prevPos.r < this._mapSizeR - 1) {
this.changeObjAlphasInLocation(1, {
c: prevPos.c - 1,
r: prevPos.r + 1,
});
}
if (prevPos.r < this._mapSizeR - 1) {
this.changeObjAlphasInLocation(1, {
c: prevPos.c,
r: prevPos.r + 1,
});
}
if (pos.c > 0) {
this.changeObjAlphasInLocation(0.7, {
c: pos.c - 1,
r: pos.r,
});
}
if (pos.c > 0 && pos.r < this._mapSizeR - 1) {
this.changeObjAlphasInLocation(0.7, {
c: pos.c - 1,
r: pos.r + 1,
});
}
if (pos.r < this._mapSizeR - 1) {
this.changeObjAlphasInLocation(0.7, {
c: pos.c,
r: pos.r + 1,
});
}
}
// TODO: check if there is a way not to update main character alpha each time
obj.alpha = 1;
}
}
/**
* Arranges depths (z-index) of the map-objects starting from the given location.
*
* @method
* @function
* @private
* @internal
*
* @param pos {TColumnRowPair} position object including row and column coordinates
*/
private arrangeDepthsFromLocation(pos: TColumnRowPair): void {
let a, i, j, k;
for (i = pos.r; i < this._mapSizeR; i++) {
for (j = pos.c; j >= 0; j--) {
a = this._objArray[i][j];
if (a) {
for (k = 0; k < a.length; k++) {
if (!a[k].isFloorObject) {
this._objContainer.addChild(a[k]);
}
}
}
}
}
}
/**
* Clears the highlight for the old path and highlights the new path on map.
*
* @method
* @function
* @private
* @internal
*
* @param currentPath {Array(GridNode)} the old path to clear the highlight from
* @param newPath {Array(GridNode)} the new path to highlight
*/
private arrangePathHighlight(currentPath: GridNode[], newPath: GridNode[]): void {
let i: number, tile: TileView, pathItem: GridNode;
if (currentPath) {
for (i = 0; i < currentPath.length; i++) {
pathItem = currentPath[i];
if (!newPath || newPath.indexOf(pathItem) === -1) {
tile = this._tileArray[pathItem.mapPos.r][pathItem.mapPos.c];
tile.setHighlighted(false, !this._config.tileHighlightAnimated);
}
}
}
if (newPath) {
for (i = 0; i < newPath.length; i++) {
pathItem = newPath[i];
if (!currentPath || currentPath.indexOf(pathItem) === -1) {
tile = this._tileArray[pathItem.mapPos.r][pathItem.mapPos.c];
tile.setHighlighted(true, !this._config.tileHighlightAnimated);
}
}
}
}
/**
* Stops a moving object.
*
* @method
* @function
* @private
* @internal
*
* @param obj {IMovable} map-object to be moved on path
*/
private stopObject(obj: IMovable): void {
obj.currentPath = null;
obj.currentTarget = null;
obj.currentTargetTile = null;
this.moveEngine.removeMovable(obj);
}
/**
* Moves the specified map-object through a path.
*
* @method
* @function
* @private
* @internal
*
* @param obj {IMovable} map-object to be moved on path
* @param path {Array(GridNode)} path to move object on
* @param speed {number} speed of the map-object to be used during movement, if not defined it uses previous speed or the MoveEngine's default speed, default null
*/
private moveObjThrough(obj: IMovable, path: GridNode[], speed: number = null): void {
if (this._config.instantObjectRelocation) {
const tile = this._tileArray[path[0].mapPos.r][path[0].mapPos.c];
obj.position.x = tile.position.x;
obj.position.y = tile.position.y + this.tileHalfHeight;
this.arrangeObjTransparencies(obj, obj.mapPos, tile.mapPos);
this.arrangeObjLocation(obj, tile.mapPos);
this.arrangeDepthsFromLocation(tile.mapPos);
} else {
if (this._config.highlightPath && this._currentControllable === obj) {
this.arrangePathHighlight(obj.currentPath, path);
}
if (obj.currentTarget) {
// trace("Object has a target, update the path with the new one");
// this.moveEngine.addNewPathToObject(obj, path, speed);
this.stopObject(obj);
}
this.moveEngine.prepareForMove(obj, path, speed);
obj.currentTargetTile = obj.currentPath[obj.currentPathStep];
this.onObjMoveStepBegin(obj, obj.currentPath[obj.currentPathStep].mapPos);
}
}
/**
 * Sets up the engine at the beginning of each tile change move for the specified object
 *
 * Faces the object towards the next tile, verifies the tile is still free,
 * and either starts the step or aborts and re-plans the route.
 *
 * @method
 * @function
 * @private
 * @internal
 *
 * @param obj {IMovable} map-object that is being moved
 * @param pos {TColumnRowPair} position object including row and column coordinates
 * @return {boolean} if the target tile was available and map-object has moved
 */
private onObjMoveStepBegin(obj: IMovable, pos: TColumnRowPair): boolean {
// trace("onObjMoveStepBegin");
// Note that mapPos is being updated prior to movement
obj.currentDirection = getDirBetween(obj.mapPos.r, obj.mapPos.c, pos.r, pos.c);
obj.changeVisualToDirection(obj.currentDirection, true);
// check if the next target pos is still empty
if (!this._pathFinding.isCellFilled(pos.c, pos.r)) {
// pos is movable
// this.arrangeObjTransparencies(obj, obj.mapPos, pos);
// this.arrangeObjLocation(obj, pos);
// this.arrangeDepthsFromLocation(obj.mapPos);
// if there is other object(s) on the target tile, notify the game
// const objects = this.getObjectsAtLocation(pos);
// if (objects && objects.length > 1)
// {
// if (this._config.otherObjectsOnTheNextTileCallback) { this._config.otherObjectsOnTheNextTileCallback( obj, objects ); }
// }
this.moveEngine.setMoveParameters(obj, pos);
this.moveEngine.addMovable(obj);
return true;
} else {
// pos is NOT movable: stop this step and re-plan a route to the
// original destination (currentPath[0]) around the new obstacle
this.moveEngine.removeMovable(obj);
this.checkAndMoveObjectToLocation(obj, obj.currentPath[0].mapPos);
return false;
}
}
/**
 * Sets up the engine at the end of each tile change move for the specified object
 *
 * Advances the path-step cursor, either continues along the remaining
 * path or finalizes the move, and fires the destination callback.
 *
 * @method
 * @function
 * @public
 * @internal
 *
 * @param obj {IMovable} map-object that is being moved
 */
public onObjMoveStepEnd(obj: IMovable): void {
//trace("onObjMoveStepEnd");
// the step cursor counts down towards zero; below zero => path consumed
obj.currentPathStep--;
obj.currentTarget = null;
obj.currentTargetTile = null;
const pathEnded = 0 > obj.currentPathStep;
this.moveEngine.removeMovable(obj);
if (!pathEnded) {
if (this._config.checkPathOnEachTile) {
// re-resolve the path to the final destination (currentPath[0])
// in case the map changed while this step was in flight
this.checkAndMoveObjectToLocation(obj, obj.currentPath[0].mapPos);
} else {
// drop the last node (the tile just reached) and keep walking the same path
obj.currentPath.splice(obj.currentPath.length - 1, 1);
this.moveObjThrough(obj, obj.currentPath);
}
} else {
// reached to the end of the path
obj.changeVisualToDirection(obj.currentDirection, false);
}
// clear the highlight on the tile the controllable now stands on
if (this._currentControllable === obj) {
const tile = this._tileArray[obj.mapPos.r][obj.mapPos.c];
tile.setHighlighted(false, !this._config.tileHighlightAnimated);
// if (this._config.followCharacter) { this.centralizeToLocation(obj.mapPos.c, obj.mapPos.r); }
}
if (pathEnded && this._config.objectReachedDestinationCallback) {
this._config.objectReachedDestinationCallback(obj);
}
}
/**
* Checks and follows a character
*
* @method
* @function
* @public
* @internal
*
* @param obj {IMovable} map-object to check if it is being followed
*/
public checkForFollowCharacter(obj: IMovable): void {
if (this._config.followCharacter && this._currentControllable === obj) {
this._currentFocusLocation = { c: obj.mapPos.c, r: obj.mapPos.r };
const px = this._externalCenter.x - obj.position.x * this._currentScale;
const py = this._externalCenter.y - obj.position.y * this._currentScale;
// this.centralizeToPoint(px, py, true);
this.moveEngine.addTween(
this._mapContainer.position as unknown as ITweenTarget,
0.1,
{ x: px, y: py },
0,
'easeOut',
true
);
}
}
/**
 * Checks if a map-object changes the tile it is on.
 *
 * Called during animated movement: once the object's anchor point crosses
 * into the target tile's polygon, the engine's location/transparency/depth
 * bookkeeping is updated and interested callbacks are notified.
 *
 * @method
 * @function
 * @public
 * @internal
 *
 * @param obj {IMovable} map-object to be checked
 */
public checkForTileChange(obj: IMovable): void {
if (this._config.objectUpdateCallback) {
this._config.objectUpdateCallback(obj);
}
// object visuals are anchored half a tile-height low; undo that offset
// so the point tested is the object's logical map position
const pos = { x: obj.position.x, y: obj.position.y - this.tileHalfHeight };
// const tile = this._tileArray[obj.mapPos.r][obj.mapPos.c];
const tile = this._tileArray[obj.currentTargetTile.mapPos.r][obj.currentTargetTile.mapPos.c];
// move positions to parent scale
const vertices = [];
for (let i = 0; i < tile.vertices.length; i++) {
vertices[i] = [tile.vertices[i][0] + tile.position.x, tile.vertices[i][1] + tile.position.y];
}
// only act when the target tile differs from the tile currently occupied
if (obj.currentTargetTile.mapPos.r !== obj.mapPos.r || obj.currentTargetTile.mapPos.c !== obj.mapPos.c) {
if (isInPolygon(pos, vertices)) {
this.arrangeObjTransparencies(obj, obj.mapPos, obj.currentTargetTile.mapPos);
this.arrangeObjLocation(obj, obj.currentTargetTile.mapPos);
this.arrangeDepthsFromLocation(obj.mapPos);
// if there is other object(s) on the target tile, notify the game
const objects = this.getObjectsAtLocation(obj.currentTargetTile.mapPos);
if (objects && objects.length > 1) {
if (this._config.otherObjectsOnTheNextTileCallback) {
this._config.otherObjectsOnTheNextTileCallback(obj, objects);
}
}
}
}
}
/**
 * Searches and returns a path between two locations if there is one.
 *
 * @method
 * @function
 * @public
 *
 * @param from {TColumnRowPair} object including row and column coordinates of the source location
 * @param to {TColumnRowPair} object including row and column coordinates of the target location
 *
 * @return {Array(Object)} an array of path items defining the path
 * @throws {Error} when the path-finding module has not been initialized
 */
public getPath(from: TColumnRowPair, to: TColumnRowPair): GridNode[] {
    if (!this._pathFinding) {
        throw new Error("Path finding hasn't been initialized yet!");
    }
    return this._pathFinding.solve(from.c, from.r, to.c, to.r);
}
/**
 * Checks for a path and moves the map-object on map if there is an available path
 *
 * @method
 * @function
 * @public
 *
 * @param obj {ObjectView} map-object that is being moved
 * @param tile {TileView} target map-tile or any custom object that has 'mapPos' and 'isMovableTo' defined
 * @param speed {number} speed of the map-object to be used during movement, if not defined it uses previous speed or the MoveEngine's default speed, default null
 * @return {boolean} if there is an available path to move to the target tile
 */
public checkAndMoveObjectToTile(obj: ObjectView, tile: TileView, speed: number = null): boolean {
    // tiles flagged as non-movable can never be a movement target
    if (!tile.isMovableTo) {
        return false;
    }
    return this.checkAndMoveObjectToLocation(obj, tile.mapPos, speed);
}
/**
 * Checks for a path and moves the map-object on map if there is an available path
 *
 * @method
 * @function
 * @public
 *
 * @param obj {ObjectView} map-object that is being moved
 * @param pos {TColumnRowPair} object including row and column coordinates for the target location
 * @param speed {number} speed of the map-object to be used during movement, if not defined it uses previous speed or the MoveEngine's default speed, default null
 * @return {boolean} if there is an available path to move to the target tile
 */
public checkAndMoveObjectToLocation(obj: ObjectView, pos: TColumnRowPair, speed: number = null): boolean {
    const path = this.getPath(obj.mapPos, pos);
    if (!path) {
        return false;
    }
    // begin moving; an empty path means the object is already at the target
    this.moveObjThrough(obj as IMovable, path, speed);
    return path.length > 0;
}
/**
 * Moves the current controllable map-object to a location if available.
 *
 * @method
 * @function
 * @public
 *
 * @param pos {TColumnRowPair} object including row and column coordinates for the target location
 * @param speed {number} speed of the map-object to be used during movement, if not defined it uses previous speed or the MoveEngine's default speed, default null
 * @return {boolean} if there is an available path to move to the target tile
 * @throws {Error} when no current controllable has been assigned
 */
public moveCurrentControllableToLocation(pos: TColumnRowPair, speed: number = null): boolean {
    const controllable = this._currentControllable;
    if (!controllable) {
        throw new Error('TRAVISO: _currentControllable is not defined!');
    }
    return this.checkAndMoveObjectToLocation(controllable, pos, speed);
}
/**
 * Moves the current controllable map-object to one of the adjacent available tiles of the map-object specified.
 *
 * Prefers the object's declared interaction point; otherwise picks the
 * adjacent open cell with the shortest path. If the controllable is already
 * adjacent, it is snapped in place and the destination callback fires.
 *
 * @method
 * @function
 * @public
 *
 * @param obj {ObjectView} target map-object
 * @param speed {number} speed of the map-object to be used during movement, if not defined it uses previous speed or the MoveEngine's default speed, default null
 * @return {boolean} if there is an available path to move to the target map-object
 */
public moveCurrentControllableToObj(obj: ObjectView, speed: number = null): boolean {
if (!this._currentControllable) {
throw new Error('TRAVISO: _currentControllable is not defined!');
}
// check if there is a preferred interaction point
if (obj.currentInteractionOffset) {
const targetPos = {
c: obj.mapPos.c + obj.currentInteractionOffset.c,
r: obj.mapPos.r + obj.currentInteractionOffset.r,
};
if (this.checkAndMoveObjectToLocation(this._currentControllable, targetPos, speed)) {
return true;
}
}
// no usable interaction point: scan all open cells around the target
const cellArray = this._pathFinding.getAdjacentOpenCells(
obj.mapPos.c,
obj.mapPos.r,
obj.columnSpan,
obj.rowSpan
);
let tile: TileView;
// sentinel larger than any realistic path length on the map
let minLength = 3000;
let path, minPath, tempFlagHolder;
for (let i = 0; i < cellArray.length; i++) {
tile = this._tileArray[cellArray[i].mapPos.r][cellArray[i].mapPos.c];
if (tile) {
if (
tile.mapPos.c === this._currentControllable.mapPos.c &&
tile.mapPos.r === this._currentControllable.mapPos.r
) {
// already next to the object, do nothing
this.arrangePathHighlight((this._currentControllable as IMovable).currentPath, null);
this.stopObject(this._currentControllable as IMovable);
// temporarily force instant relocation so the single-node "move"
// just settles the object in place, then restore the user's setting
tempFlagHolder = this._config.instantObjectRelocation;
this._config.instantObjectRelocation = true;
this.moveObjThrough(this._currentControllable as IMovable, [
new GridNode(tile.mapPos.c, tile.mapPos.r, 1),
]);
this._config.instantObjectRelocation = tempFlagHolder;
this._currentControllable.changeVisualToDirection(
this._currentControllable.currentDirection,
false
);
if (this._config.objectReachedDestinationCallback) {
this._config.objectReachedDestinationCallback(this._currentControllable);
}
return true;
}
// keep the shortest path found across all adjacent cells
path = this.getPath(this._currentControllable.mapPos, tile.mapPos);
if (path && path.length < minLength) {
minLength = path.length;
minPath = path;
}
}
}
if (minPath) {
this.moveObjThrough(this._currentControllable as IMovable, minPath, speed);
return true;
}
return false;
}
/**
 * Finds the nearest tile to the point given in the map's local scope.
 *
 * Scans all tiles and keeps the one whose center is closest to the point;
 * once a tile is within a quarter-tile-width of the point the search is
 * considered good enough.
 *
 * @method
 * @function
 * @public
 *
 * @param lp {TPositionPair} Point to check
 * @return {TileView} The nearest map-tile if there is one. Otherwise `null`
 */
public getTileFromLocalPos(lp: TPositionPair): TileView {
let closestTile: TileView = null;
if (isInPolygon(lp, this._mapVertices)) {
// Using nearest point instead of checking polygon vertices for each tile. Should be faster...
// NOTE: there is an ignored bug (for better performance) that tile is not selected when u click on the far corner
const thresh = this.tileHalfWidth / 2;
let tile, i, j, dist;
// sentinel larger than any realistic tile distance
let closestDist = 3000;
for (i = 0; i < this._mapSizeR; i++) {
for (j = 0; j < this._mapSizeC; j++) {
tile = this._tileArray[i][j];
if (tile) {
dist = getDist(lp, tile.position);
if (dist < closestDist) {
closestDist = dist;
closestTile = tile;
if (dist < thresh) {
// NOTE(review): this only exits the inner (column) loop; the row
// loop keeps scanning. Result is still correct (later hits can only
// replace closestTile with a closer one) but a labeled break may
// have been intended for performance — confirm.
break;
}
}
}
}
}
}
return closestTile;
}
/**
 * Checks if an interaction occurs using the interaction data coming from PIXI.
 * If there is any interaction starts necessary movements or performs necessary callbacks.
 *
 * Resolution order on the clicked tile: an interactive object wins and
 * triggers the object-select callback; otherwise a movable-to object (or an
 * empty tile) triggers the move-to-tile flow and the tile-select callback.
 *
 * @method
 * @function
 * @private
 * @internal
 *
 * @param interactionData {PIXI.InteractionData} interaction data coming from PIXI
 */
private checkForTileClick(interactionData: InteractionData): void {
const lp = this._mapContainer.toLocal(interactionData.global);
const closestTile = this.getTileFromLocalPos(lp);
if (closestTile) {
// objects stacked on the clicked tile, if any
const a = this._objArray[closestTile.mapPos.r][closestTile.mapPos.c];
if (a) {
for (let k = 0; k < a.length; k++) {
if (a[k].isInteractive) {
if (this._config.objectSelectCallback) {
this._config.objectSelectCallback(a[k]);
}
break;
}
// TODO CHECK: this might cause issues when there is one movable and one not movable object on the same tile
else if (a[k].isMovableTo) {
// NOTE(review): this branch duplicates the empty-tile branch below;
// consider extracting a shared helper in a future refactor
if (
this._config.dontAutoMoveToTile ||
!this._currentControllable ||
this.checkAndMoveObjectToTile(this._currentControllable, closestTile)
) {
if (this._config.highlightTargetTile) {
closestTile.setHighlighted(true, !this._config.tileHighlightAnimated);
}
if (this._config.tileSelectCallback) {
this._config.tileSelectCallback(closestTile.mapPos.r, closestTile.mapPos.c);
}
break;
}
}
}
} else if (
this._config.dontAutoMoveToTile ||
!this._currentControllable ||
this.checkAndMoveObjectToTile(this._currentControllable, closestTile)
) {
// empty tile clicked: same highlight + callback flow as above
if (this._config.highlightTargetTile) {
closestTile.setHighlighted(true, !this._config.tileHighlightAnimated);
}
if (this._config.tileSelectCallback) {
this._config.tileSelectCallback(closestTile.mapPos.r, closestTile.mapPos.c);
}
}
}
}
/**
 * Enables mouse/touch interactions by registering the engine's bound
 * pointer handlers on itself (PIXI routes both mouse and touch input
 * through pointer events).
 *
 * @method
 * @function
 * @public
 */
public enableInteraction(): void {
    this.on('pointerdown', this.onMouseDown_binded);
    this.on('pointermove', this.onMouseMove_binded);
    this.on('pointerup', this.onMouseUp_binded);
    // releases outside the engine's bounds must also end the gesture
    this.on('pointerupoutside', this.onMouseUp_binded);
    this.interactive = true;
}
/**
 * Disables mouse/touch interactions.
 *
 * Removes every pointer handler registered by `enableInteraction`, turns
 * off PIXI hit-testing on the engine and cancels any drag in progress.
 *
 * @method
 * @function
 * @public
 */
public disableInteraction(): void {
    this.off('pointerdown', this.onMouseDown_binded)
        .off('pointerup', this.onMouseUp_binded)
        // .off('pointerout', this.onMouseUp_binded)
        .off('pointerupoutside', this.onMouseUp_binded)
        .off('pointermove', this.onMouseMove_binded);
    // BUG FIX: this was `true`, which left PIXI hit-testing enabled even
    // though all handlers were removed; disabling interaction must clear it
    // (symmetric with enableInteraction, which sets it to true).
    this.interactive = false;
    this._dragging = false;
}
/**
 * Checks if the given point is inside the masked area if there is a mask defined.
 *
 * When no mask is in use every point is accepted.
 *
 * @method
 * @function
 * @private
 * @internal
 *
 * @param p {TPositionPair} point to check
 * @return {boolean} if the point is inside the masked area
 */
private isInteractionInMask(p: TPositionPair): boolean {
    if (!this._config.useMask) {
        return true;
    }
    // accept only points inside the engine's position frame (inclusive edges)
    const frame = this._posFrame;
    return (
        p.x >= frame.x &&
        p.y >= frame.y &&
        p.x <= frame.x + frame.w &&
        p.y <= frame.y + frame.h
    );
}
// ******************** START: MOUSE INTERACTIONS **************************** //
/**
* Handler function for mouse-down event.
*
* @method
* @function
* @private
* @internal
*
* @param event {InteractionEvent} interaction event object
*/
private onMouseDown(event: InteractionEvent): void {
const globalPos = event.data.global;
if (!this._dragging && this.isInteractionInMask(globalPos)) {
this._dragging = true;
//this.mouseDownTime = new Date();
this._dragInitStartingX = this._dragPrevStartingX = globalPos.x;
this._dragInitStartingY = this._dragPrevStartingY = globalPos.y;
}
}
/**
* Handler function for mouse-move event.
*
* @method
* @function
* @private
* @internal
*
* @param event {InteractionEvent} interaction event object
*/
private onMouseMove(event: InteractionEvent): void {
if (this._dragging && this._config.mapDraggable) {
const globalPos = event.data.global;
this._mapContainer.position.x += globalPos.x - this._dragPrevStartingX;
this._mapContainer.position.y += globalPos.y - this._dragPrevStartingY;
this._dragPrevStartingX = globalPos.x;
this._dragPrevStartingY = globalPos.y;
}
}
/**
* Handler function for mouse-up event.
*
* @method
* @function
* @private
* @internal
*
* @param event {InteractionEvent} interaction event object
*/
private onMouseUp(event: InteractionEvent): void {
if (this._dragging) {
this._dragging = false;
//const passedTime = (new Date()) - this.mouseDownTime;
const distX = event.data.global.x - this._dragInitStartingX;
const distY = event.data.global.y - this._dragInitStartingY;
if (Math.abs(distX) < 5 && Math.abs(distY) < 5) {
// NOT DRAGGING IT IS A CLICK
this.checkForTileClick(event.data);
}
}
}
// ********************* END: MOUSE INTERACTIONS **************************** //
/**
* Repositions the content according to user settings. Call this method
* whenever you want to change the size or position of the engine.
*
* @method
* @function
* @public
*
* @param posFrame {TPositionFrame} frame to position the engine, default is the previously set posFrame and if not set, it is `{ x : 0, y : 0, w : 800, h : 600 }`
*/
public repositionContent(posFrame: TPositionFrame = null): void {
trace('EngineView repositionContent');
posFrame = posFrame || this._posFrame || { x: 0, y: 0, w: 800, h: 600 };
this.position.x = posFrame.x;
this.position.y = posFrame.y;
this._externalCenter = {
x: posFrame.w >> 1,
y: posFrame.h >> 1,
};
this.centralizeToCurrentFocusLocation(true);
if (this._bg) {
this._bg.clear();
// this._bg.lineStyle(2, 0x000000, 1);
this._bg.beginFill(this._config.backgroundColor, 1.0);
this._bg.drawRect(0, 0, posFrame.w, posFrame.h);
this._bg.endFill();
}
if (this._mapMask && this._mapContainer) {
this._mapMask.clear();
this._mapMask.beginFill(0x000000);
this._mapMask.drawRect(0, 0, posFrame.w, posFrame.h);
this._mapMask.endFill();
this._mapContainer.mask = this._mapMask;
}
this._posFrame = posFrame;
}
/**
 * Clears all references and stops all animations inside the engine.
 * Call this method when you want to get rid of an engine instance.
 *
 * Teardown order: interaction handlers, move-engine, every tile and object
 * on the grid, path-finding, then the display containers and map data.
 *
 * @method
 * @function
 * @public
 */
public destroy(): void {
trace('EngineView destroy');
this.disableInteraction();
this.moveEngine.destroy();
this.moveEngine = null;
let item, i, j, k;
// destroy every tile and every object cell on the map grid
for (i = 0; i < this._mapSizeR; i++) {
for (j = this._mapSizeC - 1; j >= 0; j--) {
item = this._tileArray[i][j];
if (item) {
item.destroy();
// this._groundContainer.removeChild(item);
}
this._tileArray[i][j] = null;
item = this._objArray[i][j];
if (item) {
// a cell can host several objects; destroy each one
for (k = 0; k < item.length; k++) {
if (item[k]) {
item[k].destroy();
// this._objContainer.removeChild(item[k]);
}
item[k] = null;
}
}
this._objArray[i][j] = null;
}
}
item = null;
this._pathFinding.destroy();
this._pathFinding = null;
this._currentControllable = null;
this._tileArray = null;
this._objArray = null;
this._bg = null;
this._groundContainer = null;
this._objContainer = null;
// detach and release the map container and its mask
if (this._mapContainer) {
this._mapContainer.mask = null;
this.removeChild(this._mapContainer);
this._mapContainer = null;
}
if (this._mapMask) {
this.removeChild(this._mapMask);
this._mapMask = null;
}
this._config = null;
this.mapData.groundMapData = null;
this.mapData.objectsMapData = null;
this.mapData.objects = null;
this.mapData.tiles = null;
this.mapData = null;
}
// Externally modifiable properties for EngineView
// Each accessor below simply proxies the matching field on the internal
// engine configuration object (this._config); setting a value takes effect
// immediately for subsequent engine operations.
/**
 * specifies whether to zoom instantly or with a tween animation
 * @property
 * @default false
 */
public get instantCameraZoom(): boolean {
return this._config.instantCameraZoom;
}
public set instantCameraZoom(value: boolean) {
this._config.instantCameraZoom = value;
}
/**
 * defines if the camera will follow the current controllable or not
 * @property
 * @default true
 */
public get followCharacter(): boolean {
return this._config.followCharacter;
}
public set followCharacter(value: boolean) {
this._config.followCharacter = value;
}
/**
 * specifies whether the camera moves instantly or with a tween animation to the target location
 * @property
 * @default false
 */
public get instantCameraRelocation(): boolean {
return this._config.instantCameraRelocation;
}
public set instantCameraRelocation(value: boolean) {
this._config.instantCameraRelocation = value;
}
/**
 * specifies whether the map-objects will be moved to target location instantly or with an animation
 * @property
 * @default false
 */
public get instantObjectRelocation(): boolean {
return this._config.instantObjectRelocation;
}
public set instantObjectRelocation(value: boolean) {
this._config.instantObjectRelocation = value;
}
/**
 * make objects transparent when the controllable is behind them
 * @property
 * @default true
 */
public get changeTransparencies(): boolean {
return this._config.changeTransparencies;
}
public set changeTransparencies(value: boolean) {
this._config.changeTransparencies = value;
}
/**
 * highlight the path when the current controllable moves on the map
 * @property
 * @default true
 */
public get highlightPath(): boolean {
return this._config.highlightPath;
}
public set highlightPath(value: boolean) {
this._config.highlightPath = value;
}
/**
 * Highlight the target tile when the current controllable moves on the map
 * @property
 * @default true
 */
public get highlightTargetTile(): boolean {
return this._config.highlightTargetTile;
}
public set highlightTargetTile(value: boolean) {
this._config.highlightTargetTile = value;
}
/**
 * animate the tile highlights
 * @property
 * @default true
 */
public get tileHighlightAnimated(): boolean {
return this._config.tileHighlightAnimated;
}
public set tileHighlightAnimated(value: boolean) {
this._config.tileHighlightAnimated = value;
}
/**
 * When a tile selected don't move the controllable immediately but still call 'tileSelectCallback'
 * @property
 * @default false
 */
public get dontAutoMoveToTile(): boolean {
return this._config.dontAutoMoveToTile;
}
public set dontAutoMoveToTile(value: boolean) {
this._config.dontAutoMoveToTile = value;
}
/**
 * Engine looks for a path every time an object moves to a new tile on the path
 * (set to false if you don't have moving objects other then your controllable on your map)
 * @property
 * @default true
 */
public get checkPathOnEachTile(): boolean {
return this._config.checkPathOnEachTile;
}
public set checkPathOnEachTile(value: boolean) {
this._config.checkPathOnEachTile = value;
}
/**
 * enable dragging the map with touch-and-touchmove or mousedown-and-mousemove on the map
 * @property
 * @default true
 */
public get mapDraggable(): boolean {
return this._config.mapDraggable;
}
public set mapDraggable(value: boolean) {
this._config.mapDraggable = value;
}
/**
 * callback function that will be called once everything is loaded and engine instance is ready
 * @property
 * @default null
 */
public get engineInstanceReadyCallback(): (engineInstance: EngineView) => unknown {
return this._config.engineInstanceReadyCallback;
}
public set engineInstanceReadyCallback(value: (engineInstance: EngineView) => unknown) {
this._config.engineInstanceReadyCallback = value;
}
/**
 * callback function that will be called when a tile is selected. Params will be the row and column indexes of the tile selected.
 * @property
 * @default null
 */
public get tileSelectCallback(): (r: number, c: number) => unknown {
return this._config.tileSelectCallback;
}
public set tileSelectCallback(value: (r: number, c: number) => unknown) {
this._config.tileSelectCallback = value;
}
/**
 * callback function that will be called when a tile with an interactive map-object on it is selected. Call param will be the object selected.
 * @property
 * @default null
 */
public get objectSelectCallback(): (objectView: ObjectView) => unknown {
return this._config.objectSelectCallback;
}
public set objectSelectCallback(value: (objectView: ObjectView) => unknown) {
this._config.objectSelectCallback = value;
}
/**
 * callback function that will be called when any moving object reaches its destination. Call param will be the moving object itself.
 * @property
 * @default null
 */
public get objectReachedDestinationCallback(): (objectView: ObjectView) => unknown {
return this._config.objectReachedDestinationCallback;
}
public set objectReachedDestinationCallback(value: (objectView: ObjectView) => unknown) {
this._config.objectReachedDestinationCallback = value;
}
/**
 * callback function that will be called when any moving object is in move and there are other objects on the next tile. Call params will be the moving object and an array of objects on the next tile.
 * @property
 * @default null
 */
public get otherObjectsOnTheNextTileCallback(): (
objectView: ObjectView,
otherObjectViews: ObjectView[]
) => unknown {
return this._config.otherObjectsOnTheNextTileCallback;
}
public set otherObjectsOnTheNextTileCallback(
value: (objectView: ObjectView, otherObjectViews: ObjectView[]) => unknown
) {
this._config.otherObjectsOnTheNextTileCallback = value;
}
/**
 * callback function that will be called every time an objects direction or position changed
 * @property
 * @default null
 */
public get objectUpdateCallback(): (objectView: ObjectView) => unknown {
return this._config.objectUpdateCallback;
}
public set objectUpdateCallback(value: (objectView: ObjectView) => unknown) {
this._config.objectUpdateCallback = value;
}
/**
 * alpha value for the tile highlight stroke (this will be overridden if a highlight-image is defined)
 * @property
 * @default 1.0
 */
public get tileHighlightStrokeAlpha(): number {
return this._config.tileHighlightStrokeAlpha;
}
public set tileHighlightStrokeAlpha(value: number) {
this._config.tileHighlightStrokeAlpha = value;
}
/**
 * color code for the tile highlight stroke (this will be overridden if a highlight-image is defined)
 * @property
 * @default 0xFFFFFF
 */
public get tileHighlightStrokeColor(): number {
return this._config.tileHighlightStrokeColor;
}
public set tileHighlightStrokeColor(value: number) {
this._config.tileHighlightStrokeColor = value;
}
/**
 * alpha value for the tile highlight fill (this will be overridden if a highlight-image is defined)
 * @property
 * @default 1.0
 */
public get tileHighlightFillAlpha(): number {
return this._config.tileHighlightFillAlpha;
}
public set tileHighlightFillAlpha(value: number) {
this._config.tileHighlightFillAlpha = value;
}
/**
 * color code for the tile highlight fill (this will be overridden if a highlight-image is defined)
 * @property
 * @default 0x80d7ff
 */
public get tileHighlightFillColor(): number {
return this._config.tileHighlightFillColor;
}
public set tileHighlightFillColor(value: number) {
this._config.tileHighlightFillColor = value;
}
}
import {expect} from 'chai';
import * as Rx from '../../dist/cjs/Rx';
import marbleTestingSignature = require('../helpers/marble-testing'); // tslint:disable-line:no-require-imports
declare const { asDiagram };
declare const hot: typeof marbleTestingSignature.hot;
declare const cold: typeof marbleTestingSignature.cold;
declare const expectObservable: typeof marbleTestingSignature.expectObservable;
declare const expectSubscriptions: typeof marbleTestingSignature.expectSubscriptions;
const Observable = Rx.Observable;
/** @test {filter} */
describe('Observable.prototype.filter', () => {
function oddFilter(x) {
  // coerce marble values (strings) to numbers before the parity check
  const n = Number(x);
  return n % 2 === 1;
}
function isPrime(i) {
  // trial division up to the square root; inputs arrive as marble strings
  const n = +i;
  if (n <= 1) {
    return false;
  }
  const limit = Math.floor(Math.sqrt(n));
  for (let divisor = 2; divisor <= limit; divisor++) {
    if (n % divisor === 0) {
      return false;
    }
  }
  return true;
}
// Marble syntax reminder: '-' is one time frame, '|' completion, '#' error,
// '^' marks the subscription point and '!' the unsubscription point in the
// subscription diagrams.
asDiagram('filter(x => x % 2 === 1)')('should filter out even values', () => {
const source = hot('--0--1--2--3--4--|');
const subs = '^ !';
const expected = '-----1-----3-----|';
expectObservable(source.filter(oddFilter)).toBe(expected);
expectSubscriptions(source.subscriptions).toBe(subs);
});
it('should filter in only prime numbers', () => {
const source = hot('-1--2--^-3-4-5-6--7-8--9--|');
const subs = '^ !';
const expected = '--3---5----7-------|';
expectObservable(source.filter(isPrime)).toBe(expected);
expectSubscriptions(source.subscriptions).toBe(subs);
});
it('should filter with an always-true predicate', () => {
const source = hot('-1--2--^-3-4-5-6--7-8--9--|');
const expected = '--3-4-5-6--7-8--9--|';
const predicate = () => { return true; };
expectObservable(source.filter(predicate)).toBe(expected);
});
it('should filter with an always-false predicate', () => {
const source = hot('-1--2--^-3-4-5-6--7-8--9--|');
const expected = '-------------------|';
const predicate = () => { return false; };
expectObservable(source.filter(predicate)).toBe(expected);
});
it('should filter in only prime numbers, source unsubscribes early', () => {
const source = hot('-1--2--^-3-4-5-6--7-8--9--|');
const subs = '^ ! ';
const unsub = ' ! ';
const expected = '--3---5----7- ';
expectObservable(source.filter(isPrime), unsub).toBe(expected);
expectSubscriptions(source.subscriptions).toBe(subs);
});
it('should filter in only prime numbers, source throws', () => {
const source = hot('-1--2--^-3-4-5-6--7-8--9--#');
const subs = '^ !';
const expected = '--3---5----7-------#';
expectObservable(source.filter(isPrime)).toBe(expected);
expectSubscriptions(source.subscriptions).toBe(subs);
});
it('should filter in only prime numbers, but predicate throws', () => {
const source = hot('-1--2--^-3-4-5-6--7-8--9--|');
const subs = '^ ! ';
const expected = '--3---5-# ';
let invoked = 0;
// throws on the 4th invocation to exercise the operator's error path
function predicate(x: any, index: number) {
invoked++;
if (invoked === 4) {
throw 'error';
}
return isPrime(x);
};
expectObservable((<any>source).filter(predicate)).toBe(expected);
expectSubscriptions(source.subscriptions).toBe(subs);
});
it('should filter in only prime numbers, predicate with index', () => {
const source = hot('-1--2--^-3-4-5-6--7-8--9--|');
const subs = '^ !';
const expected = '--3--------7-------|';
// offsets each value by 10 * index so the result depends on the index argument
function predicate(x: any, i: number) {
return isPrime((+x) + i * 10);
}
expectObservable((<any>source).filter(predicate)).toBe(expected);
expectSubscriptions(source.subscriptions).toBe(subs);
});
it('should invoke predicate once for each checked value', () => {
const source = hot('-1--2--^-3-4-5-6--7-8--9--|');
const expected = '--3---5----7-------|';
let invoked = 0;
const predicate = (x: any) => {
invoked++;
return isPrime(x);
};
const r = source
.filter(predicate)
// on completion: exactly 7 values were emitted after the subscription point
.do(null, null, () => {
expect(invoked).to.equal(7);
});
expectObservable(r).toBe(expected);
});
it('should filter in only prime numbers, predicate with index, ' +
'source unsubscribes early', () => {
const source = hot('-1--2--^-3-4-5-6--7-8--9--|');
const subs = '^ ! ';
const unsub = ' ! ';
const expected = '--3--------7- ';
function predicate(x: any, i: number) {
return isPrime((+x) + i * 10);
}
expectObservable((<any>source).filter(predicate), unsub).toBe(expected);
expectSubscriptions(source.subscriptions).toBe(subs);
});
it('should filter in only prime numbers, predicate with index, source throws', () => {
const source = hot('-1--2--^-3-4-5-6--7-8--9--#');
const subs = '^ !';
const expected = '--3--------7-------#';
function predicate(x: any, i: number) {
return isPrime((+x) + i * 10);
}
expectObservable((<any>source).filter(predicate)).toBe(expected);
expectSubscriptions(source.subscriptions).toBe(subs);
});
it('should filter in only prime numbers, predicate with index and throws', () => {
const source = hot('-1--2--^-3-4-5-6--7-8--9--|');
const subs = '^ ! ';
const expected = '--3-----# ';
let invoked = 0;
// throws on the 4th invocation to exercise the operator's error path
function predicate(x: any, i: number) {
invoked++;
if (invoked === 4) {
throw 'error';
}
return isPrime((+x) + i * 10);
};
expectObservable((<any>source).filter(predicate)).toBe(expected);
expectSubscriptions(source.subscriptions).toBe(subs);
});
it('should compose with another filter to allow multiples of six', () => {
const source = hot('-1--2--^-3-4-5-6--7-8--9--|');
const expected = '--------6----------|';
expectObservable(
source
.filter((x: number) => x % 2 === 0)
.filter((x: number) => x % 3 === 0)
).toBe(expected);
});
it('should be able to accept and use a thisArg', () => {
const source = hot('-1--2--^-3-4-5-6--7-8--9--|');
const expected = '--------6----------|';
// `this` inside each (non-arrow) predicate must be the instance passed as thisArg
function Filterer() {
this.filter1 = (x: number) => x % 2 === 0;
this.filter2 = (x: number) => x % 3 === 0;
}
const filterer = new Filterer();
expectObservable(
source
.filter(function (x) { return this.filter1(x); }, filterer)
.filter(function (x) { return this.filter2(x); }, filterer)
.filter(function (x) { return this.filter1(x); }, filterer)
).toBe(expected);
});
it('should be able to use filter and map composed', () => {
const source = hot('-1--2--^-3-4-5-6--7-8--9--|');
const expected = '----a---b----c-----|';
const values = { a: 16, b: 36, c: 64 };
expectObservable(
source
.filter((x: number) => x % 2 === 0)
.map((x: number) => x * x)
).toBe(expected, values);
});
it('should propagate errors from the source', () => {
const source = hot('--0--1--2--3--4--#');
const subs = '^ !';
const expected = '-----1-----3-----#';
expectObservable(source.filter(oddFilter)).toBe(expected);
expectSubscriptions(source.subscriptions).toBe(subs);
});
it('should support Observable.empty', () => {
const source = cold('|');
const subs = '(^!)';
const expected = '|';
expectObservable(source.filter(oddFilter)).toBe(expected);
expectSubscriptions(source.subscriptions).toBe(subs);
});
it('should support Observable.never', () => {
const source = cold('-');
const subs = '^';
const expected = '-';
expectObservable(source.filter(oddFilter)).toBe(expected);
expectSubscriptions(source.subscriptions).toBe(subs);
});
it('should support Observable.throw', () => {
const source = cold('#');
const subs = '(^!)';
const expected = '#';
expectObservable(source.filter(oddFilter)).toBe(expected);
expectSubscriptions(source.subscriptions).toBe(subs);
});
it('should send errors down the error path', (done: MochaDone) => {
// a throwing predicate must surface through the error callback, not crash
Observable.of(42).filter(<any>((x: number, index: number) => {
throw 'bad';
}))
.subscribe((x: number) => {
done(new Error('should not be called'));
}, (err: any) => {
expect(err).to.equal('bad');
done();
}, () => {
done(new Error('should not be called'));
});
});
it('should not break unsubscription chain when unsubscribed explicitly', () => {
const source = hot('-1--2--^-3-4-5-6--7-8--9--|');
const subs = '^ ! ';
const unsub = ' ! ';
const expected = '--3---5----7- ';
// mergeMaps on both sides verify unsubscription propagates through filter
const r = source
.mergeMap((x: any) => Observable.of(x))
.filter(isPrime)
.mergeMap((x: any) => Observable.of(x));
expectObservable(r, unsub).toBe(expected);
expectSubscriptions(source.subscriptions).toBe(subs);
});
// Compile-time test: user-defined type guards passed to filter must narrow
// the element type downstream, while plain boolean predicates must not.
// The assertions here are the property accesses themselves — if narrowing
// regressed, this file would fail to compile.
it('should support type guards without breaking previous behavior', () => {
// tslint:disable no-unused-variable
// type guards with interfaces and classes
{
interface Bar { bar?: string; }
interface Baz { baz?: number; }
class Foo implements Bar, Baz { constructor(public bar: string = 'name', public baz: number = 42) {} }
const isBar = (x: any): x is Bar => x && (<Bar>x).bar !== undefined;
const isBaz = (x: any): x is Baz => x && (<Baz>x).baz !== undefined;
const foo: Foo = new Foo();
Observable.of(foo).filter(foo => foo.baz === 42)
.subscribe(x => x.baz); // x is still Foo
Observable.of(foo).filter(isBar)
.subscribe(x => x.bar); // x is Bar!
const foobar: Bar = new Foo(); // type is interface, not the class
Observable.of(foobar).filter(foobar => foobar.bar === 'name')
.subscribe(x => x.bar); // <-- x is still Bar
Observable.of(foobar).filter(isBar)
.subscribe(x => x.bar); // <--- x is Bar!
const barish = { bar: 'quack', baz: 42 }; // type can quack like a Bar
Observable.of(barish).filter(x => x.bar === 'quack')
.subscribe(x => x.bar); // x is still { bar: string; baz: number; }
Observable.of(barish).filter(isBar)
.subscribe(bar => bar.bar); // x is Bar!
}
// type guards with primitive types
{
const xs: Rx.Observable<string | number> = Observable.from([ 1, 'aaa', 3, 'bb' ]);
// This type guard will narrow a `string | number` to a string in the examples below
const isString = (x: string | number): x is string => typeof x === 'string';
xs.filter(isString)
.subscribe(s => s.length); // s is string
// In contrast, this type of regular boolean predicate still maintains the original type
xs.filter(x => typeof x === 'number')
.subscribe(x => x); // x is still string | number
xs.filter((x, i) => typeof x === 'number' && x > i)
.subscribe(x => x); // x is still string | number
}
// tslint:disable enable
});
});
import {KdError, StringMap} from '@api/root.shared';
import {PersistentVolumeSource} from '@api/volume.api';
// Kind metadata attached to every resource returned by the dashboard backend.
export interface TypeMeta {
kind: string;
// Optional capability flags for this kind — presumably set by the backend
// to drive UI actions; confirm against the backend serializer.
scalable?: boolean;
restartable?: boolean;
}
// List-level metadata (pagination total).
export interface ListMeta {
totalItems: number;
}
// Subset of Kubernetes ObjectMeta exposed by the backend; all fields optional.
export interface ObjectMeta {
name?: string;
namespace?: string;
labels?: StringMap;
annotations?: StringMap;
creationTimestamp?: string;
uid?: string;
}
// Aggregated status of a Job, including its condition history.
export interface JobStatus {
status: string;
message: string;
conditions: Condition[];
}
// Base shape shared by all single-resource detail responses.
export interface ResourceDetail {
objectMeta: ObjectMeta;
typeMeta: TypeMeta;
errors: K8sError[];
}
// Base shape shared by all list responses.
export interface ResourceList {
listMeta: ListMeta;
items?: Resource[];
errors?: K8sError[];
}
// Minimal resource: identity plus kind.
export interface Resource {
objectMeta: ObjectMeta;
typeMeta: TypeMeta;
}
// A controller-like resource that owns pods.
export interface ResourceOwner extends Resource {
pods: PodInfo;
containerImages: string[];
initContainerImages: string[];
}
export interface LabelSelector {
matchLabels: StringMap;
}
// One capacity entry, e.g. a resource name with its quantity string.
export interface CapacityItem {
resourceName: string;
quantity: string;
}
// List types
export interface ClusterRoleList extends ResourceList {
items: ClusterRole[];
}
export interface ClusterRoleBindingList extends ResourceList {
items: ClusterRoleBinding[];
}
export interface RoleList extends ResourceList {
items: Role[];
}
export interface RoleBindingList extends ResourceList {
items: RoleBinding[];
}
export interface ConfigMapList extends ResourceList {
items: ConfigMap[];
}
export interface CronJobList extends ResourceList {
cumulativeMetrics: Metric[] | null;
items: CronJob[];
status: Status;
}
export interface CRDList extends ResourceList {
items: CRD[];
}
export interface CRDObjectList extends ResourceList {
typeMeta: TypeMeta;
items: CRDObject[];
}
export interface DaemonSetList extends ResourceList {
cumulativeMetrics: Metric[] | null;
daemonSets: DaemonSet[];
status: Status;
}
export interface DeploymentList extends ResourceList {
cumulativeMetrics: Metric[] | null;
deployments: Deployment[];
status: Status;
}
export interface EndpointList extends ResourceList {
endpoints: Endpoint[];
}
export interface EventList extends ResourceList {
events: Event[];
}
export interface HorizontalPodAutoscalerList extends ResourceList {
horizontalpodautoscalers: HorizontalPodAutoscaler[];
}
export interface IngressList extends ResourceList {
items: Ingress[];
}
export interface ServiceAccountList extends ResourceList {
items: ServiceAccount[];
}
export interface NetworkPolicyList extends ResourceList {
items: NetworkPolicy[];
}
export interface JobList extends ResourceList {
cumulativeMetrics: Metric[] | null;
jobs: Job[];
status: Status;
}
export interface NamespaceList extends ResourceList {
namespaces: Namespace[];
}
export interface NodeList extends ResourceList {
cumulativeMetrics: Metric[] | null;
nodes: Node[];
}
export interface PersistentVolumeClaimList extends ResourceList {
items: PersistentVolumeClaim[];
}
export interface PersistentVolumeList extends ResourceList {
items: PersistentVolume[];
}
export interface PodContainerList {
containers: string[];
}
export interface PodList extends ResourceList {
pods: Pod[];
status: Status;
podInfo?: PodInfo;
cumulativeMetrics: Metric[] | null;
}
export interface ReplicaSetList extends ResourceList {
cumulativeMetrics: Metric[] | null;
replicaSets: ReplicaSet[];
status: Status;
}
export interface ReplicationControllerList extends ResourceList {
replicationControllers: ReplicationController[];
status: Status;
}
export interface ResourceQuotaDetailList extends ResourceList {
items: ResourceQuotaDetail[];
}
export interface SecretList extends ResourceList {
secrets: Secret[];
}
export interface ServiceList extends ResourceList {
services: Service[];
}
export interface StatefulSetList extends ResourceList {
cumulativeMetrics: Metric[] | null;
statefulSets: StatefulSet[];
status: Status;
}
export interface StorageClassList extends ResourceList {
items: StorageClass[];
}
// Simple detail types
export type ClusterRole = Resource;
export type ClusterRoleBinding = Resource;
export type Role = Resource;
export type RoleBinding = Resource;
export type ConfigMap = Resource;
export type ServiceAccount = Resource;
export type NetworkPolicy = Resource;
export interface Controller extends Resource {
pods: PodInfo;
containerImages: string[];
initContainerImages: string[];
}
export interface CronJob extends Resource {
schedule: string;
suspend: boolean;
active: number;
lastSchedule: string;
containerImages: string[];
}
export interface CRD extends Resource {
group: string;
scope: string;
nameKind: string;
established: string;
}
export type CRDObject = Resource;
export interface DaemonSet extends Resource {
podInfo: PodInfo;
containerImages: string[];
initContainerImages: string[];
}
export interface Deployment extends Resource {
pods: PodInfo;
containerImages: string[];
initContainerImages: string[];
}
export interface EndpointResourceList extends ResourceList {
endpoints: EndpointResource[];
}
export interface EndpointResource extends Resource {
host: string;
nodeName: string;
ready: boolean;
ports: EndpointResourcePort[];
}
export interface EndpointResourcePort {
name: string;
port: number;
protocol: string;
}
export interface Port {
port: number;
name: string;
protocol: string;
nodePort?: number;
}
export interface Endpoint {
host: string;
nodeName?: string;
ports: Port[];
ready?: boolean;
typeMeta?: TypeMeta;
objectMeta?: ObjectMeta;
}
export interface Event extends Resource {
message: string;
sourceComponent: string;
sourceHost: string;
object: string;
objectKind?: string;
objectName?: string;
objectNamespace?: string;
count: number;
firstSeen: string;
lastSeen: string;
reason: string;
type: string;
}
export interface HorizontalPodAutoscaler extends Resource {
scaleTargetRef: ScaleTargetRef;
minReplicas: number;
maxReplicas: number;
currentCPUUtilization: number;
targetCPUUtilization?: number;
}
export interface Ingress extends Resource {
endpoints: Endpoint[];
hosts: string[];
}
export interface Job extends Resource {
podInfo: PodInfo;
containerImages: string[];
initContainerImages: string[];
parallelism: number;
}
export interface Namespace extends Resource {
phase: string;
}
export interface Node extends Resource {
ready: string;
}
export interface PersistentVolume extends Resource {
capacity: StringMap;
storageClass: string;
accessModes: string[];
reclaimPolicy: string;
mountOptions?: string[];
status: string;
claim: string;
reason: string;
}
export interface PersistentVolumeClaim extends Resource {
status: string;
volume: string;
}
export interface Pod extends Resource {
status: string;
podIP?: string;
restartCount: number;
qosClass?: string;
metrics: PodMetrics;
warnings: Event[];
nodeName: string;
serviceAccountName: string;
containerImages: string[];
}
export interface PodContainer {
name: string;
restartCount: number;
}
export interface ReplicaSet extends Resource {
podInfo: PodInfo;
containerImages: string[];
initContainerImages: string[];
}
export interface ReplicationController extends Resource {
podInfo: PodInfo;
containerImages: string[];
initContainerImages: string[];
}
export interface Secret extends Resource {
type: string;
}
export interface Service extends Resource {
internalEndpoint: Endpoint;
externalEndpoints: Endpoint[];
selector: StringMap;
type: string;
clusterIP: string;
}
export interface StatefulSet extends Resource {
podInfo: PodInfo;
containerImages: string[];
initContainerImages: string[];
}
export interface StorageClass extends Resource {
provisioner: string;
parameters: StringMap[];
}
// Detail types
export interface ReplicaSetDetail extends ResourceDetail {
selector: LabelSelector;
podInfo: PodInfo;
podList: PodList;
containerImages: string[];
initContainerImages: string[];
eventList: EventList;
}
export interface ResourceQuotaDetail extends ResourceDetail {
scopes: string[];
statusList: {[key: string]: ResourceQuotaStatus};
}
export interface DeploymentDetail extends ResourceDetail {
selector: Label[];
statusInfo: DeploymentInfo;
conditions: Condition[];
strategy: string;
minReadySeconds: number;
revisionHistoryLimit?: number;
rollingUpdateStrategy?: RollingUpdateStrategy;
events: EventList;
}
export interface ReplicationControllerDetail extends ResourceDetail {
labelSelector: StringMap;
containerImages: string[];
initContainerImages: string[];
podInfo: PodInfo;
podList: PodList;
serviceList: ServiceList;
eventList: EventList;
hasMetrics: boolean;
}
export interface ServiceDetail extends ResourceDetail {
internalEndpoint: Endpoint;
externalEndpoints: Endpoint[];
endpointList: EndpointList;
selector: StringMap;
type: string;
clusterIP: string;
podList: PodList;
sessionAffinity: string;
}
export interface DaemonSetDetail extends ResourceDetail {
labelSelector: StringMap;
containerImages: string[];
initContainerImages: string[];
podInfo: PodInfo;
}
export interface NamespaceDetail extends ResourceDetail {
phase: string;
eventList: EventList;
resourceLimits: LimitRange[];
resourceQuotaList: ResourceQuotaDetailList;
}
export interface PolicyRule {
verbs: string[];
apiGroups: string[];
resources: string[];
resourceNames: string[];
nonResourceURLs: string[];
}
export interface ClusterRoleDetail extends ResourceDetail {
rules: PolicyRule[];
}
export interface Subject {
kind: string;
apiGroup: string;
name: string;
namespace: string;
}
export interface ResourceRef {
kind: string;
apiGroup: string;
name: string;
}
export interface ClusterRoleBindingDetail extends ResourceDetail {
subjects: Subject[];
roleRef: ResourceRef;
}
export interface RoleDetail extends ResourceDetail {
rules: PolicyRule[];
}
export interface RoleBindingDetail extends ResourceDetail {
subjects: Subject[];
roleRef: ResourceRef;
}
export interface SecretDetail extends ResourceDetail {
type: string;
data: StringMap;
}
export type ServiceAccountDetail = ResourceDetail;
export interface IngressDetail extends ResourceDetail {
endpoints: Endpoint[];
spec: IngressSpec;
}
export interface IngressSpec {
ingressClassName?: string;
defaultBackend?: IngressBackend;
rules?: IngressSpecRule[];
tls?: IngressSpecTLS[];
}
export interface IngressSpecTLS {
hosts: string[];
secretName: string;
}
export interface IngressBackend {
service?: IngressBackendService;
resource?: ResourceRef;
}
export interface IngressBackendService {
name: string;
port: IngressBackendServicePort;
}
export interface IngressBackendServicePort {
name?: string;
number?: number;
}
export interface IngressSpecRule {
host?: string;
http: IngressSpecRuleHttp;
}
export interface IngressSpecRuleHttp {
paths: IngressSpecRuleHttpPath[];
}
export interface IngressSpecRuleHttpPath {
path: string;
pathType: string;
backend: IngressBackend;
}
export interface NetworkPolicyDetail extends ResourceDetail {
podSelector: LabelSelector;
ingress?: any;
egress?: any;
policyTypes?: string[];
}
export interface PersistentVolumeClaimDetail extends ResourceDetail {
status: string;
volume: string;
capacity: string;
storageClass: string;
accessModes: string[];
}
export interface StorageClassDetail extends ResourceDetail {
parameters: StringMap;
provisioner: string;
}
export interface ConfigMapDetail extends ResourceDetail {
data: StringMap;
}
export interface CRDDetail extends ResourceDetail {
version?: string;
group: string;
scope: string;
names: CRDNames;
versions: CRDVersion[];
objects: CRDObjectList;
conditions: Condition[];
subresources: string[];
}
export type CRDObjectDetail = ResourceDetail;
export interface JobDetail extends ResourceDetail {
podInfo: PodInfo;
podList: PodList;
containerImages: string[];
initContainerImages: string[];
eventList: EventList;
parallelism: number;
completions: number;
jobStatus: JobStatus;
}
export interface CronJobDetail extends ResourceDetail {
schedule: string;
suspend: boolean;
active: number;
lastSchedule: string;
concurrencyPolicy: string;
startingDeadlineSeconds: number;
}
export interface StatefulSetDetail extends ResourceDetail {
podInfo: PodInfo;
podList: PodList;
containerImages: string[];
initContainerImages: string[];
eventList: EventList;
}
export interface PersistentVolumeDetail extends ResourceDetail {
status: string;
claim: string;
reclaimPolicy: string;
accessModes: string[];
capacity: StringMap;
message: string;
storageClass: string;
reason: string;
persistentVolumeSource: PersistentVolumeSource;
mountOptions?: string[];
}
export interface PodDetail extends ResourceDetail {
initContainers: Container[];
containers: Container[];
podPhase: string;
podIP: string;
nodeName: string;
restartCount: number;
qosClass: string;
metrics: Metric[];
conditions: Condition[];
controller: Resource;
imagePullSecrets: LocalObjectReference[];
eventList: EventList;
persistentVolumeClaimList: PersistentVolumeClaimList;
securityContext: PodSecurityContext;
}
export interface LocalObjectReference {
name: string;
}
export interface NodeDetail extends ResourceDetail {
phase: string;
podCIDR: string;
providerID: string;
unschedulable: boolean;
allocatedResources: NodeAllocatedResources;
nodeInfo: NodeInfo;
containerImages: string[];
initContainerImages: string[];
addresses: NodeAddress[];
taints: NodeTaint[];
metrics: Metric[];
conditions: Condition[];
podList: PodList;
eventList: EventList;
}
export interface HorizontalPodAutoscalerDetail extends ResourceDetail {
scaleTargetRef: ScaleTargetRef;
minReplicas: number;
maxReplicas: number;
currentCPUUtilization: number;
targetCPUUtilization?: number;
currentReplicas: number;
desiredReplicas: number;
lastScaleTime: string;
}
// Validation types
export interface AppNameValidity {
valid: boolean;
}
export interface AppNameValiditySpec {
name: string;
namespace: string;
}
export interface ImageReferenceValidity {
valid: boolean;
reason: string;
}
export interface ImageReferenceValiditySpec {
reference: string;
}
export interface ProtocolValidity {
valid: boolean;
}
export interface ProtocolValiditySpec {
protocol: string;
isExternal: boolean;
}
// Auth related types
export interface AuthResponse {
jweToken: string;
errors: K8sError[];
}
export interface CanIResponse {
allowed: boolean;
}
export interface LoginSpec {
username: string;
password: string;
token: string;
kubeconfig: string;
}
export interface LoginStatus {
tokenPresent: boolean;
headerPresent: boolean;
httpsMode: boolean;
}
// Request body for deploying raw manifest content into a namespace.
export interface AppDeploymentContentSpec {
name: string;
namespace: string;
content: string;
validate: boolean;
}
// Backend response for a content deployment.
export interface AppDeploymentContentResponse {
error: string;
// NOTE(review): "contet" looks like a typo for "content", but this field
// name must match the JSON key emitted by the backend — confirm the
// backend's serializer before renaming on the client side.
contet: string;
name: string;
}
export interface AppDeploymentSpec {
containerImage: string;
containerCommand?: string;
containerCommandArgs?: string;
isExternal: boolean;
name: string;
description?: string;
portMappings: PortMapping[];
labels: Label[];
replicas: number;
namespace: string;
memoryRequirement?: string;
cpuRequirement?: number;
runAsPrivileged: boolean;
imagePullSecret: string;
variables: EnvironmentVariable[];
}
export interface CsrfToken {
token: string;
}
export interface LocalSettings {
theme: string;
}
export interface Theme {
name: string;
displayName: string;
isDark: boolean;
}
export interface AppConfig {
serverTime: number;
}
export interface ErrStatus {
message: string;
code: number;
status: string;
reason: string;
}
export interface K8sError {
ErrStatus: ErrStatus;
toKdError(): KdError;
}
export interface Condition {
type: string;
status: string;
lastProbeTime: string;
lastTransitionTime: string;
reason: string;
message: string;
}
export interface ContainerStateWaiting {
reason?: string;
message?: string;
}
export interface ContainerStateRunning {
startedAt?: string;
}
export interface ContainerStateTerminated {
exitCode: number;
reason?: string;
message?: string;
signal?: number;
}
export interface ContainerState {
waiting?: ContainerStateWaiting;
terminated?: ContainerStateTerminated;
running?: ContainerStateRunning;
}
export interface ResourceQuotaStatus {
used: string;
hard: string;
}
export interface MetricResult {
timestamp: string;
value: number;
}
export interface Metric {
dataPoints: DataPoint[];
metricName: string;
aggregation: string;
}
export interface DataPoint {
x: number;
y: number;
}
export interface ConfigMapKeyRef {
name: string;
key: string;
}
export interface SecretKeyRef {
name: string;
key: string;
}
export interface EnvVar {
name: string;
value: string;
valueFrom: EnvVarSource;
}
export interface EnvVarSource {
configMapKeyRef: ConfigMapKeyRef;
secretKeyRef: SecretKeyRef;
}
export interface Container {
name: string;
image: string;
env: EnvVar[];
commands: string[];
args: string[];
volumeMounts: VolumeMounts[];
securityContext: ContainerSecurityContext;
status: ContainerStatus;
livenessProbe: Probe;
readinessProbe: Probe;
startupProbe: Probe;
}
export interface Probe {
httpGet?: ProbeHttpGet;
tcpSocket?: ProbeTcpSocket;
exec?: ProbeExec;
initialDelaySeconds?: number;
timeoutSeconds?: number;
periodSeconds?: number;
successThreshold?: number;
failureThreshold?: number;
terminationGracePeriodSeconds?: number;
}
export interface ProbeHttpGet {
path?: string;
port: string | number;
host?: string;
scheme?: string;
httpHeaders?: string[];
}
export interface ProbeTcpSocket {
port: string | number;
host?: string;
}
export interface ProbeExec {
command?: string[];
}
export interface ContainerStatus {
name: string;
state: ContainerState;
lastTerminationState: ContainerState;
ready: boolean;
restartCount: number;
started?: boolean;
}
export interface ISecurityContext {
seLinuxOptions?: SELinuxOptions;
windowsOptions?: WindowsSecurityContextOptions;
runAsUser?: number;
runAsGroup?: number;
runAsNonRoot?: boolean;
seccompProfile?: SeccompProfile;
}
export interface ContainerSecurityContext extends ISecurityContext {
capabilities?: Capabilities;
privileged?: boolean;
readOnlyRootFilesystem?: boolean;
allowPrivilegeEscalation?: boolean;
procMount?: string; // ProcMountType;
}
export interface PodSecurityContext extends ISecurityContext {
fsGroup?: number;
fsGroupChangePolicy?: string;
supplementalGroups?: number[];
sysctls?: Sysctl[];
}
export interface Sysctl {
name: string;
value: string;
}
export interface Capabilities {
add: string[];
drop: string[];
}
export interface SELinuxOptions {
user?: string;
role?: string;
type?: string;
level?: string;
}
export interface WindowsSecurityContextOptions {
gMSACredentialSpecName?: string;
gMSACredentialSpec?: string;
runAsUserName?: string;
}
export interface SeccompProfile {
type: string; // SeccompProfileType;
localhostProfile?: string;
}
export interface VolumeMounts {
name: string;
readOnly: boolean;
mountPath: string;
subPath: string;
volume: PersistentVolumeSource;
}
export interface CRDNames {
plural: string;
singular?: string;
shortNames?: string[];
kind: string;
listKind?: string;
categories?: string[];
}
export interface CRDVersion {
name: string;
served: boolean;
storage: boolean;
}
export interface PodMetrics {
cpuUsage: number;
memoryUsage: number;
cpuUsageHistory: MetricResult[];
memoryUsageHistory: MetricResult[];
}
export interface Status {
running: number;
failed: number;
pending: number;
succeeded: number;
}
export interface PodStatus {
podPhase: string;
status: string;
containerStates: ContainerState[];
}
export interface PodInfo {
current: number;
desired: number;
running: number;
pending: number;
failed: number;
succeeded: number;
warnings: Event[];
}
export interface NodeAllocatedResources {
cpuRequests: number;
cpuRequestsFraction: number;
cpuLimits: number;
cpuLimitsFraction: number;
cpuCapacity: number;
memoryRequests: number;
memoryRequestsFraction: number;
memoryLimits: number;
memoryLimitsFraction: number;
memoryCapacity: number;
allocatedPods: number;
podCapacity: number;
podFraction: number;
}
export interface NodeInfo {
machineID: string;
systemUUID: string;
bootID: string;
kernelVersion: string;
osImage: string;
containerRuntimeVersion: string;
kubeletVersion: string;
kubeProxyVersion: string;
operatingSystem: string;
architecture: string;
}
export interface NodeAddress {
type: string;
address: string;
}
export interface NodeTaint {
key: string;
value: string;
effect: string;
timeAdded: number;
}
export interface PortMapping {
port: number | null;
protocol: string;
targetPort: number | null;
}
export interface EnvironmentVariable {
name: string;
value: string;
}
export interface Label {
key: string;
value: string;
}
export interface PodEvent {
reason: string;
message: string;
}
export interface RollingUpdateStrategy {
maxSurge: number | string;
maxUnavailable: number | string;
}
export interface DeploymentInfo {
replicas: number;
updated: number;
available: number;
unavailable: number;
}
export interface ReplicationControllerSpec {
replicas: number;
}
export interface ReplicaCounts {
desiredReplicas: number;
actualReplicas: number;
}
export interface DeleteReplicationControllerSpec {
deleteServices: boolean;
}
export interface NamespaceSpec {
name: string;
}
export interface ReplicationControllerPodWithContainers {
name: string;
startTime?: string;
totalRestartCount: number;
podContainers: PodContainer[];
}
export interface ReplicationControllerPods {
pods: ReplicationControllerPodWithContainers[];
}
export interface LogSources {
podNames: string[];
containerNames: string[];
initContainerNames: string[];
}
export interface LogDetails {
info: LogInfo;
logs: LogLine[];
selection: LogSelection;
}
export interface LogInfo {
podName: string;
containerName: string;
initContainerName: string;
fromDate: string;
toDate: string;
truncated: boolean;
}
export interface LogLine {
timestamp: string;
content: string;
}
export enum LogControl {
LoadStart = 'beginning',
LoadEnd = 'end',
TimestampOldest = 'oldest',
TimestampNewest = 'newest',
}
export interface LogSelection {
logFilePosition: LogControl;
referencePoint: LogLineReference;
offsetFrom: number;
offsetTo: number;
}
export interface LogLineReference {
timestamp: LogControl;
lineNum: number;
}
export type LogOptions = {
previous: boolean;
timestamps: boolean;
};
export interface Protocols {
protocols: string[];
}
export interface SecretSpec {
name: string;
namespace: string;
data: string;
}
export interface LimitRange {
resourceType: string;
resourceName: string;
min: string;
max: string;
default: string;
defaultRequest: string;
maxLimitRequestRatio: string;
}
export interface ScaleTargetRef {
kind: string;
name: string;
}
export interface GlobalSettings {
clusterName: string;
itemsPerPage: number;
labelsLimit: number;
logsAutoRefreshTimeInterval: number;
resourceAutoRefreshTimeInterval: number;
disableAccessDeniedNotifications: boolean;
defaultNamespace: string;
namespaceFallbackList: string[];
}
export interface PinnedResource {
kind: string;
name: string;
displayName: string;
namespace?: string;
namespaced: boolean;
}
export interface APIVersion {
name: string;
}
// NOTE(review): LoginSpec is also declared earlier in this file with a
// lowercase `kubeconfig` member. TypeScript declaration merging combines
// both declarations, so the merged interface requires BOTH `kubeconfig`
// and `kubeConfig` as non-optional members — almost certainly unintended.
// Consolidate into a single declaration with the field name the backend
// actually expects.
export interface LoginSpec {
username: string;
password: string;
token: string;
kubeConfig: string;
}
// NOTE(review): exact duplicate of AuthResponse declared earlier in this
// file; the identical members merge harmlessly, but one copy should go.
export interface AuthResponse {
jweToken: string;
errors: K8sError[];
}
// NOTE(review): exact duplicate of LoginStatus declared earlier in this file.
export interface LoginStatus {
tokenPresent: boolean;
headerPresent: boolean;
httpsMode: boolean;
}
export type AuthenticationMode = string;
export interface EnabledAuthenticationModes {
modes: AuthenticationMode[];
}
export interface LoginSkippableResponse {
skippable: boolean;
}
export interface SystemBanner {
message: string;
severity: string;
}
export interface TerminalResponse {
id: string;
}
export interface ShellFrame {
Op: string;
Data?: string;
SessionID?: string;
Rows?: number;
Cols?: number;
}
export interface TerminalPageParams {
namespace: string;
resourceKind: string;
resourceName: string;
pod?: string;
container?: string;
}
export interface SockJSSimpleEvent {
type: string;
toString(): string;
}
export interface SJSCloseEvent extends SockJSSimpleEvent {
code: number;
reason: string;
wasClean: boolean;
}
export interface SJSMessageEvent extends SockJSSimpleEvent {
data: string;
}
export interface Plugin extends Resource {
name: string;
path: string;
dependencies: string[];
}
export interface PluginList extends ResourceList {
items?: Plugin[];
}
import { ClientOptions } from "../../client";
import { CustomerOptions } from "../../types";
import { Service } from "../../service";
import { resources, services, protobuf, longrunning } from "../index";
import {
BaseMutationHookArgs,
HookedCancellation,
HookedResolution,
Hooks,
} from "../../hooks";
export default class ServiceFactory extends Service {
constructor(
clientOptions: ClientOptions,
customerOptions: CustomerOptions,
hooks?: Hooks
) {
super(clientOptions, customerOptions, hooks ?? {});
}
/**
 * Read-only accessor for AccessibleBiddingStrategy resources.
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/AccessibleBiddingStrategyService
 */
public get accessibleBiddingStrategies() {
const service = this.loadService<services.AccessibleBiddingStrategyService>("AccessibleBiddingStrategyServiceClient")
return {
/**
 * @description Retrieve a resources.AccessibleBiddingStrategy in full detail
 * @warning Don't use get in production!
 * @returns resources.AccessibleBiddingStrategy
 */
get: async (resourceName: string): Promise<resources.AccessibleBiddingStrategy> => {
const request = new services.GetAccessibleBiddingStrategyRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getAccessibleBiddingStrategy(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
// Per-customer auth/login headers built by the Service base class.
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalize gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
 * Accessor for AccountBudgetProposal resources (get / create / remove).
 * NOTE: this file is generated; the create and remove methods below share
 * the same hook-driven mutation flow (onMutationStart -> call ->
 * onMutationEnd / onMutationError).
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/AccountBudgetProposalService
 */
public get accountBudgetProposals() {
const service = this.loadService<services.AccountBudgetProposalService>("AccountBudgetProposalServiceClient")
type MutateOptions = Partial<Pick<services.IMutateAccountBudgetProposalRequest, "validate_only">>
return {
/**
 * @description Retrieve a resources.AccountBudgetProposal in full detail
 * @warning Don't use get in production!
 * @returns resources.AccountBudgetProposal
 */
get: async (resourceName: string): Promise<resources.AccountBudgetProposal> => {
const request = new services.GetAccountBudgetProposalRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getAccountBudgetProposal(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
 * @description create resources of type resources.IAccountBudgetProposal
 * @returns services.MutateAccountBudgetProposalResponse
 */
create: async (
accountBudgetProposals: (resources.IAccountBudgetProposal | resources.AccountBudgetProposal)[] ,
options?: MutateOptions
): Promise<services.MutateAccountBudgetProposalResponse > => {
// Wrap each proposal in a "create" operation.
const ops = this.buildOperations<
services.AccountBudgetProposalOperation,
resources.IAccountBudgetProposal
>(
"create",
accountBudgetProposals
);
const request = this.buildRequest<
services.AccountBudgetProposalOperation,
services.IMutateAccountBudgetProposalRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "AccountBudgetProposalService.mutateAccountBudgetProposal",
mutation: request,
isServiceCall: true,
};
// Pre-mutation hook: may cancel the call outright (returning its own
// result) or edit request options in place before the RPC is sent.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateAccountBudgetProposal(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-mutation hook: may replace the response via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response,
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return response;
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
// Error hook observes the normalized error; the error is still thrown.
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description remove resources of type string
 * @returns services.MutateAccountBudgetProposalResponse
 */
remove: async (
accountBudgetProposals: string[] ,
options?: MutateOptions
): Promise<services.MutateAccountBudgetProposalResponse > => {
// Wrap each resource name in a "remove" operation; flow below mirrors
// create() exactly.
const ops = this.buildOperations<
services.AccountBudgetProposalOperation,
string
>(
"remove",
accountBudgetProposals
);
const request = this.buildRequest<
services.AccountBudgetProposalOperation,
services.IMutateAccountBudgetProposalRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "AccountBudgetProposalService.mutateAccountBudgetProposal",
mutation: request,
isServiceCall: true,
};
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateAccountBudgetProposal(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response,
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return response;
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
 * Read-only accessor for AccountBudget resources.
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/AccountBudgetService
 */
public get accountBudgets() {
  const service = this.loadService<services.AccountBudgetService>("AccountBudgetServiceClient")
  return {
    /**
     * @description Retrieve a resources.AccountBudget in full detail
     * @warning Don't use get in production!
     * @returns resources.AccountBudget
     */
    get: async (resourceName: string): Promise<resources.AccountBudget> => {
      const getRequest = new services.GetAccountBudgetRequest({
        resource_name: resourceName,
      });
      try {
        // @ts-expect-error Response is an array type
        const [accountBudget] = await service.getAccountBudget(getRequest, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        return accountBudget;
      } catch (error) {
        // Normalise transport errors into a GoogleAdsError before surfacing them.
        throw this.getGoogleAdsError(error);
      }
    }
  }
}
/**
 * Accessor for AccountLink resources: exposes get/createAccountLink/update/remove helpers.
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/AccountLinkService
 */
public get accountLinks() {
  const service = this.loadService<services.AccountLinkService>("AccountLinkServiceClient")
  // Options callers may pass through to the mutate request.
  type MutateOptions = Partial<Pick<services.IMutateAccountLinkRequest, "partial_failure"|"validate_only">>
  return {
    /**
     * @description Retrieve a resources.AccountLink in full detail
     * @warning Don't use get in production!
     * @returns resources.AccountLink
     */
    get: async (resourceName: string): Promise<resources.AccountLink> => {
      const request = new services.GetAccountLinkRequest({
        resource_name: resourceName,
      });
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.getAccountLink(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        return response;
      } catch (err) {
        // Normalise transport errors into a GoogleAdsError before surfacing them.
        throw this.getGoogleAdsError(err);
      }
    }
    ,
    /**
     * Forward a pre-built CreateAccountLinkRequest to the service.
     * @link https://developers.google.com/google-ads/api/reference/rpc/v9/AccountLinkService#createaccountlink
     */
    createAccountLink: async (request: services.CreateAccountLinkRequest): Promise<services.CreateAccountLinkResponse> => {
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.createAccountLink(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        return response;
      } catch (err) {
        throw this.getGoogleAdsError(err);
      }
    }
    ,
    /**
     * @description update resources of type resources.IAccountLink
     * @returns services.MutateAccountLinkResponse
     */
    update: async (
      accountLinks: (resources.IAccountLink | resources.AccountLink)[] ,
      options?: MutateOptions
    ): Promise<services.MutateAccountLinkResponse > => {
      // Wrap each resource in an "update" operation (the class reference is used
      // to compute the update field mask).
      const ops = this.buildOperations<
        services.AccountLinkOperation,
        resources.IAccountLink
      >(
        "update",
        accountLinks
        // @ts-expect-error Static class type here is fine
        , resources.AccountLink
      );
      // Merge the operations and any caller-supplied options into one mutate request.
      const request = this.buildRequest<
        services.AccountLinkOperation,
        services.IMutateAccountLinkRequest,
        MutateOptions
      >(ops, options);
      // Context object shared by every mutation hook invoked for this call.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "AccountLinkService.mutateAccountLink",
        mutation: request,
        isServiceCall: true,
      };
      // Pre-flight hook: may cancel the mutation or edit request options in place.
      if (this.hooks.onMutationStart) {
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          // The hook cancelled the call; return whatever value it resolved with.
          return mutationCancellation.res;
        }
      }
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateAccountLink(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        // Post hook: may substitute its own resolution for the response.
        if (this.hooks.onMutationEnd) {
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        // Surface any partial-failure details on the response before returning.
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalise the error, notify the error hook, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
    ,
    /**
     * @description remove resources of type string (account link resource names)
     * @returns services.MutateAccountLinkResponse
     */
    remove: async (
      accountLinks: string[] ,
      options?: MutateOptions
    ): Promise<services.MutateAccountLinkResponse > => {
      // Wrap each resource name in a "remove" operation.
      const ops = this.buildOperations<
        services.AccountLinkOperation,
        string
      >(
        "remove",
        accountLinks
      );
      // Merge the operations and any caller-supplied options into one mutate request.
      const request = this.buildRequest<
        services.AccountLinkOperation,
        services.IMutateAccountLinkRequest,
        MutateOptions
      >(ops, options);
      // Context object shared by every mutation hook invoked for this call.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "AccountLinkService.mutateAccountLink",
        mutation: request,
        isServiceCall: true,
      };
      // Pre-flight hook: may cancel the mutation or edit request options in place.
      if (this.hooks.onMutationStart) {
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          // The hook cancelled the call; return whatever value it resolved with.
          return mutationCancellation.res;
        }
      }
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateAccountLink(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        // Post hook: may substitute its own resolution for the response.
        if (this.hooks.onMutationEnd) {
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        // Surface any partial-failure details on the response before returning.
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalise the error, notify the error hook, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
  }
}
/**
 * Read-only accessor for AdGroupAdAssetView resources.
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/AdGroupAdAssetViewService
 */
public get adGroupAdAssetViews() {
  const service = this.loadService<services.AdGroupAdAssetViewService>("AdGroupAdAssetViewServiceClient")
  return {
    /**
     * @description Retrieve a resources.AdGroupAdAssetView in full detail
     * @warning Don't use get in production!
     * @returns resources.AdGroupAdAssetView
     */
    get: async (resourceName: string): Promise<resources.AdGroupAdAssetView> => {
      const getRequest = new services.GetAdGroupAdAssetViewRequest({
        resource_name: resourceName,
      });
      try {
        // @ts-expect-error Response is an array type
        const [assetView] = await service.getAdGroupAdAssetView(getRequest, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        return assetView;
      } catch (error) {
        // Normalise transport errors into a GoogleAdsError before surfacing them.
        throw this.getGoogleAdsError(error);
      }
    }
  }
}
/**
 * Accessor for AdGroupAdLabel resources: exposes get/create/remove helpers.
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/AdGroupAdLabelService
 */
public get adGroupAdLabels() {
  const service = this.loadService<services.AdGroupAdLabelService>("AdGroupAdLabelServiceClient")
  // Options callers may pass through to the mutate request.
  type MutateOptions = Partial<Pick<services.IMutateAdGroupAdLabelsRequest, "partial_failure"|"validate_only">>
  return {
    /**
     * @description Retrieve a resources.AdGroupAdLabel in full detail
     * @warning Don't use get in production!
     * @returns resources.AdGroupAdLabel
     */
    get: async (resourceName: string): Promise<resources.AdGroupAdLabel> => {
      const request = new services.GetAdGroupAdLabelRequest({
        resource_name: resourceName,
      });
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.getAdGroupAdLabel(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        return response;
      } catch (err) {
        // Normalise transport errors into a GoogleAdsError before surfacing them.
        throw this.getGoogleAdsError(err);
      }
    }
    ,
    /**
     * @description create resources of type resources.IAdGroupAdLabel
     * @returns services.MutateAdGroupAdLabelsResponse
     */
    create: async (
      adGroupAdLabels: (resources.IAdGroupAdLabel | resources.AdGroupAdLabel)[] ,
      options?: MutateOptions
    ): Promise<services.MutateAdGroupAdLabelsResponse > => {
      // Wrap each resource in a "create" operation.
      const ops = this.buildOperations<
        services.AdGroupAdLabelOperation,
        resources.IAdGroupAdLabel
      >(
        "create",
        adGroupAdLabels
      );
      // Merge the operations and any caller-supplied options into one mutate request.
      const request = this.buildRequest<
        services.AdGroupAdLabelOperation,
        services.IMutateAdGroupAdLabelsRequest,
        MutateOptions
      >(ops, options);
      // Context object shared by every mutation hook invoked for this call.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "AdGroupAdLabelService.mutateAdGroupAdLabels",
        mutation: request,
        isServiceCall: true,
      };
      // Pre-flight hook: may cancel the mutation or edit request options in place.
      if (this.hooks.onMutationStart) {
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          // The hook cancelled the call; return whatever value it resolved with.
          return mutationCancellation.res;
        }
      }
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateAdGroupAdLabels(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        // Post hook: may substitute its own resolution for the response.
        if (this.hooks.onMutationEnd) {
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        // Surface any partial-failure details on the response before returning.
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalise the error, notify the error hook, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
    ,
    /**
     * @description remove resources of type string (ad group ad label resource names)
     * @returns services.MutateAdGroupAdLabelsResponse
     */
    remove: async (
      adGroupAdLabels: string[] ,
      options?: MutateOptions
    ): Promise<services.MutateAdGroupAdLabelsResponse > => {
      // Wrap each resource name in a "remove" operation.
      const ops = this.buildOperations<
        services.AdGroupAdLabelOperation,
        string
      >(
        "remove",
        adGroupAdLabels
      );
      // Merge the operations and any caller-supplied options into one mutate request.
      const request = this.buildRequest<
        services.AdGroupAdLabelOperation,
        services.IMutateAdGroupAdLabelsRequest,
        MutateOptions
      >(ops, options);
      // Context object shared by every mutation hook invoked for this call.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "AdGroupAdLabelService.mutateAdGroupAdLabels",
        mutation: request,
        isServiceCall: true,
      };
      // Pre-flight hook: may cancel the mutation or edit request options in place.
      if (this.hooks.onMutationStart) {
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          // The hook cancelled the call; return whatever value it resolved with.
          return mutationCancellation.res;
        }
      }
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateAdGroupAdLabels(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        // Post hook: may substitute its own resolution for the response.
        if (this.hooks.onMutationEnd) {
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        // Surface any partial-failure details on the response before returning.
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalise the error, notify the error hook, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
  }
}
/**
 * Accessor for AdGroupAd resources: exposes get/create/update/remove helpers.
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/AdGroupAdService
 */
public get adGroupAds() {
  const service = this.loadService<services.AdGroupAdService>("AdGroupAdServiceClient")
  // Options callers may pass through to the mutate request.
  type MutateOptions = Partial<Pick<services.IMutateAdGroupAdsRequest, "partial_failure"|"validate_only"|"response_content_type">>
  return {
    /**
     * @description Retrieve a resources.AdGroupAd in full detail
     * @warning Don't use get in production!
     * @returns resources.AdGroupAd
     */
    get: async (resourceName: string): Promise<resources.AdGroupAd> => {
      const request = new services.GetAdGroupAdRequest({
        resource_name: resourceName,
      });
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.getAdGroupAd(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        return response;
      } catch (err) {
        // Normalise transport errors into a GoogleAdsError before surfacing them.
        throw this.getGoogleAdsError(err);
      }
    }
    ,
    /**
     * @description create resources of type resources.IAdGroupAd
     * @returns services.MutateAdGroupAdsResponse
     */
    create: async (
      adGroupAds: (resources.IAdGroupAd | resources.AdGroupAd)[] ,
      options?: MutateOptions
    ): Promise<services.MutateAdGroupAdsResponse > => {
      // Wrap each resource in a "create" operation.
      const ops = this.buildOperations<
        services.AdGroupAdOperation,
        resources.IAdGroupAd
      >(
        "create",
        adGroupAds
      );
      // Merge the operations and any caller-supplied options into one mutate request.
      const request = this.buildRequest<
        services.AdGroupAdOperation,
        services.IMutateAdGroupAdsRequest,
        MutateOptions
      >(ops, options);
      // Context object shared by every mutation hook invoked for this call.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "AdGroupAdService.mutateAdGroupAds",
        mutation: request,
        isServiceCall: true,
      };
      // Pre-flight hook: may cancel the mutation or edit request options in place.
      if (this.hooks.onMutationStart) {
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          // The hook cancelled the call; return whatever value it resolved with.
          return mutationCancellation.res;
        }
      }
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateAdGroupAds(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        // Post hook: may substitute its own resolution for the response.
        if (this.hooks.onMutationEnd) {
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        // Surface any partial-failure details on the response before returning.
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalise the error, notify the error hook, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
    ,
    /**
     * @description update resources of type resources.IAdGroupAd
     * @returns services.MutateAdGroupAdsResponse
     */
    update: async (
      adGroupAds: (resources.IAdGroupAd | resources.AdGroupAd)[] ,
      options?: MutateOptions
    ): Promise<services.MutateAdGroupAdsResponse > => {
      // Wrap each resource in an "update" operation (the class reference is used
      // to compute the update field mask).
      const ops = this.buildOperations<
        services.AdGroupAdOperation,
        resources.IAdGroupAd
      >(
        "update",
        adGroupAds
        // @ts-expect-error Static class type here is fine
        , resources.AdGroupAd
      );
      // Merge the operations and any caller-supplied options into one mutate request.
      const request = this.buildRequest<
        services.AdGroupAdOperation,
        services.IMutateAdGroupAdsRequest,
        MutateOptions
      >(ops, options);
      // Context object shared by every mutation hook invoked for this call.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "AdGroupAdService.mutateAdGroupAds",
        mutation: request,
        isServiceCall: true,
      };
      // Pre-flight hook: may cancel the mutation or edit request options in place.
      if (this.hooks.onMutationStart) {
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          // The hook cancelled the call; return whatever value it resolved with.
          return mutationCancellation.res;
        }
      }
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateAdGroupAds(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        // Post hook: may substitute its own resolution for the response.
        if (this.hooks.onMutationEnd) {
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        // Surface any partial-failure details on the response before returning.
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalise the error, notify the error hook, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
    ,
    /**
     * @description remove resources of type string (ad group ad resource names)
     * @returns services.MutateAdGroupAdsResponse
     */
    remove: async (
      adGroupAds: string[] ,
      options?: MutateOptions
    ): Promise<services.MutateAdGroupAdsResponse > => {
      // Wrap each resource name in a "remove" operation.
      const ops = this.buildOperations<
        services.AdGroupAdOperation,
        string
      >(
        "remove",
        adGroupAds
      );
      // Merge the operations and any caller-supplied options into one mutate request.
      const request = this.buildRequest<
        services.AdGroupAdOperation,
        services.IMutateAdGroupAdsRequest,
        MutateOptions
      >(ops, options);
      // Context object shared by every mutation hook invoked for this call.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "AdGroupAdService.mutateAdGroupAds",
        mutation: request,
        isServiceCall: true,
      };
      // Pre-flight hook: may cancel the mutation or edit request options in place.
      if (this.hooks.onMutationStart) {
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          // The hook cancelled the call; return whatever value it resolved with.
          return mutationCancellation.res;
        }
      }
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateAdGroupAds(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        // Post hook: may substitute its own resolution for the response.
        if (this.hooks.onMutationEnd) {
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        // Surface any partial-failure details on the response before returning.
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalise the error, notify the error hook, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
  }
}
/**
 * Accessor for AdGroupAsset resources: exposes get/create/update/remove helpers.
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/AdGroupAssetService
 */
public get adGroupAssets() {
  const service = this.loadService<services.AdGroupAssetService>("AdGroupAssetServiceClient")
  // Options callers may pass through to the mutate request.
  type MutateOptions = Partial<Pick<services.IMutateAdGroupAssetsRequest, "partial_failure"|"validate_only"|"response_content_type">>
  return {
    /**
     * @description Retrieve a resources.AdGroupAsset in full detail
     * @warning Don't use get in production!
     * @returns resources.AdGroupAsset
     */
    get: async (resourceName: string): Promise<resources.AdGroupAsset> => {
      const request = new services.GetAdGroupAssetRequest({
        resource_name: resourceName,
      });
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.getAdGroupAsset(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        return response;
      } catch (err) {
        // Normalise transport errors into a GoogleAdsError before surfacing them.
        throw this.getGoogleAdsError(err);
      }
    }
    ,
    /**
     * @description create resources of type resources.IAdGroupAsset
     * @returns services.MutateAdGroupAssetsResponse
     */
    create: async (
      adGroupAssets: (resources.IAdGroupAsset | resources.AdGroupAsset)[] ,
      options?: MutateOptions
    ): Promise<services.MutateAdGroupAssetsResponse > => {
      // Wrap each resource in a "create" operation.
      const ops = this.buildOperations<
        services.AdGroupAssetOperation,
        resources.IAdGroupAsset
      >(
        "create",
        adGroupAssets
      );
      // Merge the operations and any caller-supplied options into one mutate request.
      const request = this.buildRequest<
        services.AdGroupAssetOperation,
        services.IMutateAdGroupAssetsRequest,
        MutateOptions
      >(ops, options);
      // Context object shared by every mutation hook invoked for this call.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "AdGroupAssetService.mutateAdGroupAssets",
        mutation: request,
        isServiceCall: true,
      };
      // Pre-flight hook: may cancel the mutation or edit request options in place.
      if (this.hooks.onMutationStart) {
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          // The hook cancelled the call; return whatever value it resolved with.
          return mutationCancellation.res;
        }
      }
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateAdGroupAssets(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        // Post hook: may substitute its own resolution for the response.
        if (this.hooks.onMutationEnd) {
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        // Surface any partial-failure details on the response before returning.
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalise the error, notify the error hook, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
    ,
    /**
     * @description update resources of type resources.IAdGroupAsset
     * @returns services.MutateAdGroupAssetsResponse
     */
    update: async (
      adGroupAssets: (resources.IAdGroupAsset | resources.AdGroupAsset)[] ,
      options?: MutateOptions
    ): Promise<services.MutateAdGroupAssetsResponse > => {
      // Wrap each resource in an "update" operation (the class reference is used
      // to compute the update field mask).
      const ops = this.buildOperations<
        services.AdGroupAssetOperation,
        resources.IAdGroupAsset
      >(
        "update",
        adGroupAssets
        // @ts-expect-error Static class type here is fine
        , resources.AdGroupAsset
      );
      // Merge the operations and any caller-supplied options into one mutate request.
      const request = this.buildRequest<
        services.AdGroupAssetOperation,
        services.IMutateAdGroupAssetsRequest,
        MutateOptions
      >(ops, options);
      // Context object shared by every mutation hook invoked for this call.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "AdGroupAssetService.mutateAdGroupAssets",
        mutation: request,
        isServiceCall: true,
      };
      // Pre-flight hook: may cancel the mutation or edit request options in place.
      if (this.hooks.onMutationStart) {
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          // The hook cancelled the call; return whatever value it resolved with.
          return mutationCancellation.res;
        }
      }
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateAdGroupAssets(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        // Post hook: may substitute its own resolution for the response.
        if (this.hooks.onMutationEnd) {
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        // Surface any partial-failure details on the response before returning.
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalise the error, notify the error hook, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
    ,
    /**
     * @description remove resources of type string (ad group asset resource names)
     * @returns services.MutateAdGroupAssetsResponse
     */
    remove: async (
      adGroupAssets: string[] ,
      options?: MutateOptions
    ): Promise<services.MutateAdGroupAssetsResponse > => {
      // Wrap each resource name in a "remove" operation.
      const ops = this.buildOperations<
        services.AdGroupAssetOperation,
        string
      >(
        "remove",
        adGroupAssets
      );
      // Merge the operations and any caller-supplied options into one mutate request.
      const request = this.buildRequest<
        services.AdGroupAssetOperation,
        services.IMutateAdGroupAssetsRequest,
        MutateOptions
      >(ops, options);
      // Context object shared by every mutation hook invoked for this call.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "AdGroupAssetService.mutateAdGroupAssets",
        mutation: request,
        isServiceCall: true,
      };
      // Pre-flight hook: may cancel the mutation or edit request options in place.
      if (this.hooks.onMutationStart) {
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          // The hook cancelled the call; return whatever value it resolved with.
          return mutationCancellation.res;
        }
      }
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateAdGroupAssets(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        // Post hook: may substitute its own resolution for the response.
        if (this.hooks.onMutationEnd) {
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        // Surface any partial-failure details on the response before returning.
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalise the error, notify the error hook, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
  }
}
/**
 * Read-only accessor for AdGroupAudienceView resources.
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/AdGroupAudienceViewService
 */
public get adGroupAudienceViews() {
  const service = this.loadService<services.AdGroupAudienceViewService>("AdGroupAudienceViewServiceClient")
  return {
    /**
     * @description Retrieve a resources.AdGroupAudienceView in full detail
     * @warning Don't use get in production!
     * @returns resources.AdGroupAudienceView
     */
    get: async (resourceName: string): Promise<resources.AdGroupAudienceView> => {
      const getRequest = new services.GetAdGroupAudienceViewRequest({
        resource_name: resourceName,
      });
      try {
        // @ts-expect-error Response is an array type
        const [audienceView] = await service.getAdGroupAudienceView(getRequest, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        return audienceView;
      } catch (error) {
        // Normalise transport errors into a GoogleAdsError before surfacing them.
        throw this.getGoogleAdsError(error);
      }
    }
  }
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/AdGroupBidModifierService
*/
public get adGroupBidModifiers() {
const service = this.loadService<services.AdGroupBidModifierService>("AdGroupBidModifierServiceClient")
type MutateOptions = Partial<Pick<services.IMutateAdGroupBidModifiersRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
 * @description Retrieve a resources.AdGroupBidModifier in full detail
 * @warning Don't use get in production!
 * @returns resources.AdGroupBidModifier
 */
get: async (resourceName: string): Promise<resources.AdGroupBidModifier> => {
  const getRequest = new services.GetAdGroupBidModifierRequest({
    resource_name: resourceName,
  });
  try {
    // @ts-expect-error Response is an array type
    const [bidModifier] = await service.getAdGroupBidModifier(getRequest, {
      // @ts-expect-error This arg doesn't exist in the type definitions
      otherArgs: {
        headers: this.callHeaders,
      },
    });
    return bidModifier;
  } catch (error) {
    // Normalise transport errors into a GoogleAdsError before surfacing them.
    throw this.getGoogleAdsError(error);
  }
}
,
/**
 * @description create resources of type resources.IAdGroupBidModifier
 * @returns services.MutateAdGroupBidModifiersResponse
 */
create: async (
  adGroupBidModifiers: (resources.IAdGroupBidModifier | resources.AdGroupBidModifier)[] ,
  options?: MutateOptions
): Promise<services.MutateAdGroupBidModifiersResponse > => {
  // Wrap each resource in a "create" operation.
  const ops = this.buildOperations<
    services.AdGroupBidModifierOperation,
    resources.IAdGroupBidModifier
  >(
    "create",
    adGroupBidModifiers
  );
  // Merge the operations and any caller-supplied options into one mutate request.
  const request = this.buildRequest<
    services.AdGroupBidModifierOperation,
    services.IMutateAdGroupBidModifiersRequest,
    MutateOptions
  >(ops, options);
  // Context object shared by every mutation hook invoked for this call.
  const baseHookArguments: BaseMutationHookArgs = {
    credentials: this.credentials,
    method: "AdGroupBidModifierService.mutateAdGroupBidModifiers",
    mutation: request,
    isServiceCall: true,
  };
  // Pre-flight hook: may cancel the mutation or edit request options in place.
  if (this.hooks.onMutationStart) {
    const mutationCancellation: HookedCancellation = { cancelled: false };
    await this.hooks.onMutationStart({
      ...baseHookArguments,
      cancel: (res) => {
        mutationCancellation.cancelled = true;
        mutationCancellation.res = res;
      },
      editOptions: (options) => {
        Object.entries(options).forEach(([key, val]) => {
          // @ts-expect-error Index with key type is fine
          request[key] = val;
        });
      },
    });
    if (mutationCancellation.cancelled) {
      // The hook cancelled the call; return whatever value it resolved with.
      return mutationCancellation.res;
    }
  }
  try {
    // @ts-expect-error Response is an array type
    const [response] = await service.mutateAdGroupBidModifiers(request, {
      // @ts-expect-error This arg doesn't exist in the type definitions
      otherArgs: {
        headers: this.callHeaders,
      },
    });
    // Post hook: may substitute its own resolution for the response.
    if (this.hooks.onMutationEnd) {
      const mutationResolution: HookedResolution = { resolved: false };
      await this.hooks.onMutationEnd({
        ...baseHookArguments,
        response: this.decodePartialFailureError(response),
        resolve: (res) => {
          mutationResolution.resolved = true;
          mutationResolution.res = res;
        },
      });
      if (mutationResolution.resolved) {
        return mutationResolution.res;
      }
    }
    // Surface any partial-failure details on the response before returning.
    return this.decodePartialFailureError(response);
  } catch (err) {
    // Normalise the error, notify the error hook, then rethrow.
    const googleAdsError = this.getGoogleAdsError(err);
    if (this.hooks.onMutationError) {
      await this.hooks.onMutationError({
        ...baseHookArguments,
        error: googleAdsError,
      });
    }
    throw googleAdsError;
  }
}
,
/**
 * @description update resources of type resources.IAdGroupBidModifier
 * @returns services.MutateAdGroupBidModifiersResponse
 */
update: async (
  adGroupBidModifiers: (resources.IAdGroupBidModifier | resources.AdGroupBidModifier)[] ,
  options?: MutateOptions
): Promise<services.MutateAdGroupBidModifiersResponse > => {
  // Wrap each resource in an "update" operation (the class reference is used
  // to compute the update field mask).
  const ops = this.buildOperations<
    services.AdGroupBidModifierOperation,
    resources.IAdGroupBidModifier
  >(
    "update",
    adGroupBidModifiers
    // @ts-expect-error Static class type here is fine
    , resources.AdGroupBidModifier
  );
  // Merge the operations and any caller-supplied options into one mutate request.
  const request = this.buildRequest<
    services.AdGroupBidModifierOperation,
    services.IMutateAdGroupBidModifiersRequest,
    MutateOptions
  >(ops, options);
  // Context object shared by every mutation hook invoked for this call.
  const baseHookArguments: BaseMutationHookArgs = {
    credentials: this.credentials,
    method: "AdGroupBidModifierService.mutateAdGroupBidModifiers",
    mutation: request,
    isServiceCall: true,
  };
  // Pre-flight hook: may cancel the mutation or edit request options in place.
  if (this.hooks.onMutationStart) {
    const mutationCancellation: HookedCancellation = { cancelled: false };
    await this.hooks.onMutationStart({
      ...baseHookArguments,
      cancel: (res) => {
        mutationCancellation.cancelled = true;
        mutationCancellation.res = res;
      },
      editOptions: (options) => {
        Object.entries(options).forEach(([key, val]) => {
          // @ts-expect-error Index with key type is fine
          request[key] = val;
        });
      },
    });
    if (mutationCancellation.cancelled) {
      // The hook cancelled the call; return whatever value it resolved with.
      return mutationCancellation.res;
    }
  }
  try {
    // @ts-expect-error Response is an array type
    const [response] = await service.mutateAdGroupBidModifiers(request, {
      // @ts-expect-error This arg doesn't exist in the type definitions
      otherArgs: {
        headers: this.callHeaders,
      },
    });
    // Post hook: may substitute its own resolution for the response.
    if (this.hooks.onMutationEnd) {
      const mutationResolution: HookedResolution = { resolved: false };
      await this.hooks.onMutationEnd({
        ...baseHookArguments,
        response: this.decodePartialFailureError(response),
        resolve: (res) => {
          mutationResolution.resolved = true;
          mutationResolution.res = res;
        },
      });
      if (mutationResolution.resolved) {
        return mutationResolution.res;
      }
    }
    // Surface any partial-failure details on the response before returning.
    return this.decodePartialFailureError(response);
  } catch (err) {
    // Normalise the error, notify the error hook, then rethrow.
    const googleAdsError = this.getGoogleAdsError(err);
    if (this.hooks.onMutationError) {
      await this.hooks.onMutationError({
        ...baseHookArguments,
        error: googleAdsError,
      });
    }
    throw googleAdsError;
  }
}
,
/**
 * @description remove resources of type string (resource names)
 * @returns services.MutateAdGroupBidModifiersResponse
 */
remove: async (
  adGroupBidModifiers: string[] ,
  options?: MutateOptions
): Promise<services.MutateAdGroupBidModifiersResponse > => {
  // Wrap each resource name in a "remove" operation.
  const ops = this.buildOperations<
    services.AdGroupBidModifierOperation,
    string
  >(
    "remove",
    adGroupBidModifiers
  );
  // Merge the operations and caller-supplied MutateOptions into one request.
  const request = this.buildRequest<
    services.AdGroupBidModifierOperation,
    services.IMutateAdGroupBidModifiersRequest,
    MutateOptions
  >(ops, options);
  // Arguments shared by every mutation hook invoked for this call.
  const baseHookArguments: BaseMutationHookArgs = {
    credentials: this.credentials,
    method: "AdGroupBidModifierService.mutateAdGroupBidModifiers",
    mutation: request,
    isServiceCall: true,
  };
  if (this.hooks.onMutationStart) {
    // The hook may cancel the call (returning its own value to the caller)
    // or patch fields on the outgoing request via editOptions.
    const mutationCancellation: HookedCancellation = { cancelled: false };
    await this.hooks.onMutationStart({
      ...baseHookArguments,
      cancel: (res) => {
        mutationCancellation.cancelled = true;
        mutationCancellation.res = res;
      },
      editOptions: (options) => {
        Object.entries(options).forEach(([key, val]) => {
          // @ts-expect-error Index with key type is fine
          request[key] = val;
        });
      },
    });
    if (mutationCancellation.cancelled) {
      return mutationCancellation.res;
    }
  }
  try {
    // Issue the RPC, forwarding custom call headers via otherArgs.
    // @ts-expect-error Response is an array type
    const [response] = await service.mutateAdGroupBidModifiers(request, {
      // @ts-expect-error This arg doesn't exist in the type definitions
      otherArgs: {
        headers: this.callHeaders,
      },
    });
    if (this.hooks.onMutationEnd) {
      // The hook sees the decoded response and may substitute its own
      // resolution value for the method's return value.
      const mutationResolution: HookedResolution = { resolved: false };
      await this.hooks.onMutationEnd({
        ...baseHookArguments,
        response: this.decodePartialFailureError(response),
        resolve: (res) => {
          mutationResolution.resolved = true;
          mutationResolution.res = res;
        },
      });
      if (mutationResolution.resolved) {
        return mutationResolution.res;
      }
    }
    // Decode any partial-failure error embedded in the response.
    return this.decodePartialFailureError(response);
  } catch (err) {
    // Normalise the raw error, surface it to onMutationError, then rethrow.
    const googleAdsError = this.getGoogleAdsError(err);
    if (this.hooks.onMutationError) {
      await this.hooks.onMutationError({
        ...baseHookArguments,
        error: googleAdsError,
      });
    }
    throw googleAdsError;
  }
}
}
}
/**
 * Accessors for AdGroupCriterionCustomizer mutations (create / remove).
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/AdGroupCriterionCustomizerService
 */
public get adGroupCriterionCustomizers() {
  // Concrete gRPC client for this service, resolved on property access.
  const service = this.loadService<services.AdGroupCriterionCustomizerService>("AdGroupCriterionCustomizerServiceClient")
  // Request flags callers may override per mutation.
  type MutateOptions = Partial<Pick<services.IMutateAdGroupCriterionCustomizersRequest, "partial_failure"|"validate_only"|"response_content_type">>
  return {
    /**
     * @description create resources of type resources.IAdGroupCriterionCustomizer
     * @returns services.MutateAdGroupCriterionCustomizersResponse
     */
    create: async (
      adGroupCriterionCustomizers: (resources.IAdGroupCriterionCustomizer | resources.AdGroupCriterionCustomizer)[] ,
      options?: MutateOptions
    ): Promise<services.MutateAdGroupCriterionCustomizersResponse > => {
      // Wrap each resource in a "create" operation.
      const ops = this.buildOperations<
        services.AdGroupCriterionCustomizerOperation,
        resources.IAdGroupCriterionCustomizer
      >(
        "create",
        adGroupCriterionCustomizers
      );
      // Merge the operations and caller-supplied MutateOptions into one request.
      const request = this.buildRequest<
        services.AdGroupCriterionCustomizerOperation,
        services.IMutateAdGroupCriterionCustomizersRequest,
        MutateOptions
      >(ops, options);
      // Arguments shared by every mutation hook invoked for this call.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "AdGroupCriterionCustomizerService.mutateAdGroupCriterionCustomizers",
        mutation: request,
        isServiceCall: true,
      };
      if (this.hooks.onMutationStart) {
        // The hook may cancel the call or patch request fields via editOptions.
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          return mutationCancellation.res;
        }
      }
      try {
        // Issue the RPC, forwarding custom call headers via otherArgs.
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateAdGroupCriterionCustomizers(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        if (this.hooks.onMutationEnd) {
          // The hook may substitute its own resolution value for the return.
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        // Decode any partial-failure error embedded in the response.
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalise the raw error, surface it to onMutationError, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
    ,
    /**
     * @description remove resources of type string (resource names)
     * @returns services.MutateAdGroupCriterionCustomizersResponse
     */
    remove: async (
      adGroupCriterionCustomizers: string[] ,
      options?: MutateOptions
    ): Promise<services.MutateAdGroupCriterionCustomizersResponse > => {
      // Wrap each resource name in a "remove" operation.
      const ops = this.buildOperations<
        services.AdGroupCriterionCustomizerOperation,
        string
      >(
        "remove",
        adGroupCriterionCustomizers
      );
      // Merge the operations and caller-supplied MutateOptions into one request.
      const request = this.buildRequest<
        services.AdGroupCriterionCustomizerOperation,
        services.IMutateAdGroupCriterionCustomizersRequest,
        MutateOptions
      >(ops, options);
      // Arguments shared by every mutation hook invoked for this call.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "AdGroupCriterionCustomizerService.mutateAdGroupCriterionCustomizers",
        mutation: request,
        isServiceCall: true,
      };
      if (this.hooks.onMutationStart) {
        // The hook may cancel the call or patch request fields via editOptions.
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          return mutationCancellation.res;
        }
      }
      try {
        // Issue the RPC, forwarding custom call headers via otherArgs.
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateAdGroupCriterionCustomizers(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        if (this.hooks.onMutationEnd) {
          // The hook may substitute its own resolution value for the return.
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        // Decode any partial-failure error embedded in the response.
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalise the raw error, surface it to onMutationError, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
  }
}
/**
 * Accessors for AdGroupCriterionLabel operations (get / create / remove).
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/AdGroupCriterionLabelService
 */
public get adGroupCriterionLabels() {
  // Concrete gRPC client for this service, resolved on property access.
  const service = this.loadService<services.AdGroupCriterionLabelService>("AdGroupCriterionLabelServiceClient")
  // Request flags callers may override per mutation.
  type MutateOptions = Partial<Pick<services.IMutateAdGroupCriterionLabelsRequest, "partial_failure"|"validate_only">>
  return {
    /**
     * @description Retrieve a resources.AdGroupCriterionLabel in full detail
     * @warning Don't use get in production!
     * @returns resources.AdGroupCriterionLabel
     */
    get: async (resourceName: string): Promise<resources.AdGroupCriterionLabel> => {
      // Simple lookup by fully-qualified resource name.
      const request = new services.GetAdGroupCriterionLabelRequest({
        resource_name: resourceName,
      });
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.getAdGroupCriterionLabel(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        return response;
      } catch (err) {
        // Normalise the raw error before rethrowing.
        throw this.getGoogleAdsError(err);
      }
    }
    ,
    /**
     * @description create resources of type resources.IAdGroupCriterionLabel
     * @returns services.MutateAdGroupCriterionLabelsResponse
     */
    create: async (
      adGroupCriterionLabels: (resources.IAdGroupCriterionLabel | resources.AdGroupCriterionLabel)[] ,
      options?: MutateOptions
    ): Promise<services.MutateAdGroupCriterionLabelsResponse > => {
      // Wrap each resource in a "create" operation.
      const ops = this.buildOperations<
        services.AdGroupCriterionLabelOperation,
        resources.IAdGroupCriterionLabel
      >(
        "create",
        adGroupCriterionLabels
      );
      // Merge the operations and caller-supplied MutateOptions into one request.
      const request = this.buildRequest<
        services.AdGroupCriterionLabelOperation,
        services.IMutateAdGroupCriterionLabelsRequest,
        MutateOptions
      >(ops, options);
      // Arguments shared by every mutation hook invoked for this call.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "AdGroupCriterionLabelService.mutateAdGroupCriterionLabels",
        mutation: request,
        isServiceCall: true,
      };
      if (this.hooks.onMutationStart) {
        // The hook may cancel the call or patch request fields via editOptions.
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          return mutationCancellation.res;
        }
      }
      try {
        // Issue the RPC, forwarding custom call headers via otherArgs.
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateAdGroupCriterionLabels(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        if (this.hooks.onMutationEnd) {
          // The hook may substitute its own resolution value for the return.
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        // Decode any partial-failure error embedded in the response.
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalise the raw error, surface it to onMutationError, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
    ,
    /**
     * @description remove resources of type string (resource names)
     * @returns services.MutateAdGroupCriterionLabelsResponse
     */
    remove: async (
      adGroupCriterionLabels: string[] ,
      options?: MutateOptions
    ): Promise<services.MutateAdGroupCriterionLabelsResponse > => {
      // Wrap each resource name in a "remove" operation.
      const ops = this.buildOperations<
        services.AdGroupCriterionLabelOperation,
        string
      >(
        "remove",
        adGroupCriterionLabels
      );
      // Merge the operations and caller-supplied MutateOptions into one request.
      const request = this.buildRequest<
        services.AdGroupCriterionLabelOperation,
        services.IMutateAdGroupCriterionLabelsRequest,
        MutateOptions
      >(ops, options);
      // Arguments shared by every mutation hook invoked for this call.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "AdGroupCriterionLabelService.mutateAdGroupCriterionLabels",
        mutation: request,
        isServiceCall: true,
      };
      if (this.hooks.onMutationStart) {
        // The hook may cancel the call or patch request fields via editOptions.
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          return mutationCancellation.res;
        }
      }
      try {
        // Issue the RPC, forwarding custom call headers via otherArgs.
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateAdGroupCriterionLabels(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        if (this.hooks.onMutationEnd) {
          // The hook may substitute its own resolution value for the return.
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        // Decode any partial-failure error embedded in the response.
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalise the raw error, surface it to onMutationError, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
  }
}
/**
 * Accessors for AdGroupCriterion operations (get / create / update / remove).
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/AdGroupCriterionService
 */
public get adGroupCriteria() {
  // Concrete gRPC client for this service, resolved on property access.
  const service = this.loadService<services.AdGroupCriterionService>("AdGroupCriterionServiceClient")
  // Request flags callers may override per mutation.
  type MutateOptions = Partial<Pick<services.IMutateAdGroupCriteriaRequest, "partial_failure"|"validate_only"|"response_content_type">>
  return {
    /**
     * @description Retrieve a resources.AdGroupCriterion in full detail
     * @warning Don't use get in production!
     * @returns resources.AdGroupCriterion
     */
    get: async (resourceName: string): Promise<resources.AdGroupCriterion> => {
      // Simple lookup by fully-qualified resource name.
      const request = new services.GetAdGroupCriterionRequest({
        resource_name: resourceName,
      });
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.getAdGroupCriterion(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        return response;
      } catch (err) {
        // Normalise the raw error before rethrowing.
        throw this.getGoogleAdsError(err);
      }
    }
    ,
    /**
     * @description create resources of type resources.IAdGroupCriterion
     * @returns services.MutateAdGroupCriteriaResponse
     */
    create: async (
      adGroupCriteria: (resources.IAdGroupCriterion | resources.AdGroupCriterion)[] ,
      options?: MutateOptions
    ): Promise<services.MutateAdGroupCriteriaResponse > => {
      // Wrap each resource in a "create" operation.
      const ops = this.buildOperations<
        services.AdGroupCriterionOperation,
        resources.IAdGroupCriterion
      >(
        "create",
        adGroupCriteria
      );
      // Merge the operations and caller-supplied MutateOptions into one request.
      const request = this.buildRequest<
        services.AdGroupCriterionOperation,
        services.IMutateAdGroupCriteriaRequest,
        MutateOptions
      >(ops, options);
      // Arguments shared by every mutation hook invoked for this call.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "AdGroupCriterionService.mutateAdGroupCriteria",
        mutation: request,
        isServiceCall: true,
      };
      if (this.hooks.onMutationStart) {
        // The hook may cancel the call or patch request fields via editOptions.
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          return mutationCancellation.res;
        }
      }
      try {
        // Issue the RPC, forwarding custom call headers via otherArgs.
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateAdGroupCriteria(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        if (this.hooks.onMutationEnd) {
          // The hook may substitute its own resolution value for the return.
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        // Decode any partial-failure error embedded in the response.
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalise the raw error, surface it to onMutationError, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
    ,
    /**
     * @description update resources of type resources.IAdGroupCriterion
     * @returns services.MutateAdGroupCriteriaResponse
     */
    update: async (
      adGroupCriteria: (resources.IAdGroupCriterion | resources.AdGroupCriterion)[] ,
      options?: MutateOptions
    ): Promise<services.MutateAdGroupCriteriaResponse > => {
      // Wrap each resource in an "update" operation. The static resource
      // class is also passed — presumably so buildOperations can derive the
      // update mask from it; confirm against buildOperations.
      const ops = this.buildOperations<
        services.AdGroupCriterionOperation,
        resources.IAdGroupCriterion
      >(
        "update",
        adGroupCriteria
        // @ts-expect-error Static class type here is fine
        , resources.AdGroupCriterion
      );
      // Merge the operations and caller-supplied MutateOptions into one request.
      const request = this.buildRequest<
        services.AdGroupCriterionOperation,
        services.IMutateAdGroupCriteriaRequest,
        MutateOptions
      >(ops, options);
      // Arguments shared by every mutation hook invoked for this call.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "AdGroupCriterionService.mutateAdGroupCriteria",
        mutation: request,
        isServiceCall: true,
      };
      if (this.hooks.onMutationStart) {
        // The hook may cancel the call or patch request fields via editOptions.
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          return mutationCancellation.res;
        }
      }
      try {
        // Issue the RPC, forwarding custom call headers via otherArgs.
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateAdGroupCriteria(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        if (this.hooks.onMutationEnd) {
          // The hook may substitute its own resolution value for the return.
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        // Decode any partial-failure error embedded in the response.
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalise the raw error, surface it to onMutationError, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
    ,
    /**
     * @description remove resources of type string (resource names)
     * @returns services.MutateAdGroupCriteriaResponse
     */
    remove: async (
      adGroupCriteria: string[] ,
      options?: MutateOptions
    ): Promise<services.MutateAdGroupCriteriaResponse > => {
      // Wrap each resource name in a "remove" operation.
      const ops = this.buildOperations<
        services.AdGroupCriterionOperation,
        string
      >(
        "remove",
        adGroupCriteria
      );
      // Merge the operations and caller-supplied MutateOptions into one request.
      const request = this.buildRequest<
        services.AdGroupCriterionOperation,
        services.IMutateAdGroupCriteriaRequest,
        MutateOptions
      >(ops, options);
      // Arguments shared by every mutation hook invoked for this call.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "AdGroupCriterionService.mutateAdGroupCriteria",
        mutation: request,
        isServiceCall: true,
      };
      if (this.hooks.onMutationStart) {
        // The hook may cancel the call or patch request fields via editOptions.
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          return mutationCancellation.res;
        }
      }
      try {
        // Issue the RPC, forwarding custom call headers via otherArgs.
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateAdGroupCriteria(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        if (this.hooks.onMutationEnd) {
          // The hook may substitute its own resolution value for the return.
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        // Decode any partial-failure error embedded in the response.
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalise the raw error, surface it to onMutationError, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
  }
}
/**
 * Read-only accessor for AdGroupCriterionSimulation (get only — the
 * service exposes no mutations here).
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/AdGroupCriterionSimulationService
 */
public get adGroupCriterionSimulations() {
  // Concrete gRPC client for this service, resolved on property access.
  const service = this.loadService<services.AdGroupCriterionSimulationService>("AdGroupCriterionSimulationServiceClient")
  return {
    /**
     * @description Retrieve a resources.AdGroupCriterionSimulation in full detail
     * @warning Don't use get in production!
     * @returns resources.AdGroupCriterionSimulation
     */
    get: async (resourceName: string): Promise<resources.AdGroupCriterionSimulation> => {
      // Simple lookup by fully-qualified resource name.
      const request = new services.GetAdGroupCriterionSimulationRequest({
        resource_name: resourceName,
      });
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.getAdGroupCriterionSimulation(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        return response;
      } catch (err) {
        // Normalise the raw error before rethrowing.
        throw this.getGoogleAdsError(err);
      }
    }
  }
}
/**
 * Accessors for AdGroupCustomizer mutations (create / remove).
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/AdGroupCustomizerService
 */
public get adGroupCustomizers() {
  // Concrete gRPC client for this service, resolved on property access.
  const service = this.loadService<services.AdGroupCustomizerService>("AdGroupCustomizerServiceClient")
  // Request flags callers may override per mutation.
  type MutateOptions = Partial<Pick<services.IMutateAdGroupCustomizersRequest, "partial_failure"|"validate_only"|"response_content_type">>
  return {
    /**
     * @description create resources of type resources.IAdGroupCustomizer
     * @returns services.MutateAdGroupCustomizersResponse
     */
    create: async (
      adGroupCustomizers: (resources.IAdGroupCustomizer | resources.AdGroupCustomizer)[] ,
      options?: MutateOptions
    ): Promise<services.MutateAdGroupCustomizersResponse > => {
      // Wrap each resource in a "create" operation.
      const ops = this.buildOperations<
        services.AdGroupCustomizerOperation,
        resources.IAdGroupCustomizer
      >(
        "create",
        adGroupCustomizers
      );
      // Merge the operations and caller-supplied MutateOptions into one request.
      const request = this.buildRequest<
        services.AdGroupCustomizerOperation,
        services.IMutateAdGroupCustomizersRequest,
        MutateOptions
      >(ops, options);
      // Arguments shared by every mutation hook invoked for this call.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "AdGroupCustomizerService.mutateAdGroupCustomizers",
        mutation: request,
        isServiceCall: true,
      };
      if (this.hooks.onMutationStart) {
        // The hook may cancel the call or patch request fields via editOptions.
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          return mutationCancellation.res;
        }
      }
      try {
        // Issue the RPC, forwarding custom call headers via otherArgs.
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateAdGroupCustomizers(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        if (this.hooks.onMutationEnd) {
          // The hook may substitute its own resolution value for the return.
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        // Decode any partial-failure error embedded in the response.
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalise the raw error, surface it to onMutationError, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
    ,
    /**
     * @description remove resources of type string (resource names)
     * @returns services.MutateAdGroupCustomizersResponse
     */
    remove: async (
      adGroupCustomizers: string[] ,
      options?: MutateOptions
    ): Promise<services.MutateAdGroupCustomizersResponse > => {
      // Wrap each resource name in a "remove" operation.
      const ops = this.buildOperations<
        services.AdGroupCustomizerOperation,
        string
      >(
        "remove",
        adGroupCustomizers
      );
      // Merge the operations and caller-supplied MutateOptions into one request.
      const request = this.buildRequest<
        services.AdGroupCustomizerOperation,
        services.IMutateAdGroupCustomizersRequest,
        MutateOptions
      >(ops, options);
      // Arguments shared by every mutation hook invoked for this call.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "AdGroupCustomizerService.mutateAdGroupCustomizers",
        mutation: request,
        isServiceCall: true,
      };
      if (this.hooks.onMutationStart) {
        // The hook may cancel the call or patch request fields via editOptions.
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          return mutationCancellation.res;
        }
      }
      try {
        // Issue the RPC, forwarding custom call headers via otherArgs.
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateAdGroupCustomizers(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        if (this.hooks.onMutationEnd) {
          // The hook may substitute its own resolution value for the return.
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        // Decode any partial-failure error embedded in the response.
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalise the raw error, surface it to onMutationError, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
  }
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/AdGroupExtensionSettingService
*/
public get adGroupExtensionSettings() {
const service = this.loadService<services.AdGroupExtensionSettingService>("AdGroupExtensionSettingServiceClient")
type MutateOptions = Partial<Pick<services.IMutateAdGroupExtensionSettingsRequest, "partial_failure"|"validate_only">>
return {
/**
 * @description Retrieve a resources.AdGroupExtensionSetting in full detail
 * @warning Don't use get in production!
 * @returns resources.AdGroupExtensionSetting
 */
get: async (resourceName: string): Promise<resources.AdGroupExtensionSetting> => {
  // Simple lookup by fully-qualified resource name.
  const request = new services.GetAdGroupExtensionSettingRequest({
    resource_name: resourceName,
  });
  try {
    // @ts-expect-error Response is an array type
    const [response] = await service.getAdGroupExtensionSetting(request, {
      // @ts-expect-error This arg doesn't exist in the type definitions
      otherArgs: {
        headers: this.callHeaders,
      },
    });
    return response;
  } catch (err) {
    // Normalise the raw error before rethrowing.
    throw this.getGoogleAdsError(err);
  }
}
,
/**
 * @description create resources of type resources.IAdGroupExtensionSetting
 * @returns services.MutateAdGroupExtensionSettingsResponse
 */
create: async (
  adGroupExtensionSettings: (resources.IAdGroupExtensionSetting | resources.AdGroupExtensionSetting)[] ,
  options?: MutateOptions
): Promise<services.MutateAdGroupExtensionSettingsResponse > => {
  // Wrap each resource in a "create" operation.
  const ops = this.buildOperations<
    services.AdGroupExtensionSettingOperation,
    resources.IAdGroupExtensionSetting
  >(
    "create",
    adGroupExtensionSettings
  );
  // Merge the operations and caller-supplied MutateOptions into one request.
  const request = this.buildRequest<
    services.AdGroupExtensionSettingOperation,
    services.IMutateAdGroupExtensionSettingsRequest,
    MutateOptions
  >(ops, options);
  // Arguments shared by every mutation hook invoked for this call.
  const baseHookArguments: BaseMutationHookArgs = {
    credentials: this.credentials,
    method: "AdGroupExtensionSettingService.mutateAdGroupExtensionSettings",
    mutation: request,
    isServiceCall: true,
  };
  if (this.hooks.onMutationStart) {
    // The hook may cancel the call or patch request fields via editOptions.
    const mutationCancellation: HookedCancellation = { cancelled: false };
    await this.hooks.onMutationStart({
      ...baseHookArguments,
      cancel: (res) => {
        mutationCancellation.cancelled = true;
        mutationCancellation.res = res;
      },
      editOptions: (options) => {
        Object.entries(options).forEach(([key, val]) => {
          // @ts-expect-error Index with key type is fine
          request[key] = val;
        });
      },
    });
    if (mutationCancellation.cancelled) {
      return mutationCancellation.res;
    }
  }
  try {
    // Issue the RPC, forwarding custom call headers via otherArgs.
    // @ts-expect-error Response is an array type
    const [response] = await service.mutateAdGroupExtensionSettings(request, {
      // @ts-expect-error This arg doesn't exist in the type definitions
      otherArgs: {
        headers: this.callHeaders,
      },
    });
    if (this.hooks.onMutationEnd) {
      // The hook may substitute its own resolution value for the return.
      const mutationResolution: HookedResolution = { resolved: false };
      await this.hooks.onMutationEnd({
        ...baseHookArguments,
        response: this.decodePartialFailureError(response),
        resolve: (res) => {
          mutationResolution.resolved = true;
          mutationResolution.res = res;
        },
      });
      if (mutationResolution.resolved) {
        return mutationResolution.res;
      }
    }
    // Decode any partial-failure error embedded in the response.
    return this.decodePartialFailureError(response);
  } catch (err) {
    // Normalise the raw error, surface it to onMutationError, then rethrow.
    const googleAdsError = this.getGoogleAdsError(err);
    if (this.hooks.onMutationError) {
      await this.hooks.onMutationError({
        ...baseHookArguments,
        error: googleAdsError,
      });
    }
    throw googleAdsError;
  }
}
,
/**
 * @description update resources of type resources.IAdGroupExtensionSetting
 * @returns services.MutateAdGroupExtensionSettingsResponse
 */
update: async (
  adGroupExtensionSettings: (resources.IAdGroupExtensionSetting | resources.AdGroupExtensionSetting)[] ,
  options?: MutateOptions
): Promise<services.MutateAdGroupExtensionSettingsResponse > => {
  // Wrap each resource in an "update" operation. The static resource class
  // is also passed — presumably so buildOperations can derive the update
  // mask from it; confirm against buildOperations.
  const ops = this.buildOperations<
    services.AdGroupExtensionSettingOperation,
    resources.IAdGroupExtensionSetting
  >(
    "update",
    adGroupExtensionSettings
    // @ts-expect-error Static class type here is fine
    , resources.AdGroupExtensionSetting
  );
  // Merge the operations and caller-supplied MutateOptions into one request.
  const request = this.buildRequest<
    services.AdGroupExtensionSettingOperation,
    services.IMutateAdGroupExtensionSettingsRequest,
    MutateOptions
  >(ops, options);
  // Arguments shared by every mutation hook invoked for this call.
  const baseHookArguments: BaseMutationHookArgs = {
    credentials: this.credentials,
    method: "AdGroupExtensionSettingService.mutateAdGroupExtensionSettings",
    mutation: request,
    isServiceCall: true,
  };
  if (this.hooks.onMutationStart) {
    // The hook may cancel the call or patch request fields via editOptions.
    const mutationCancellation: HookedCancellation = { cancelled: false };
    await this.hooks.onMutationStart({
      ...baseHookArguments,
      cancel: (res) => {
        mutationCancellation.cancelled = true;
        mutationCancellation.res = res;
      },
      editOptions: (options) => {
        Object.entries(options).forEach(([key, val]) => {
          // @ts-expect-error Index with key type is fine
          request[key] = val;
        });
      },
    });
    if (mutationCancellation.cancelled) {
      return mutationCancellation.res;
    }
  }
  try {
    // Issue the RPC, forwarding custom call headers via otherArgs.
    // @ts-expect-error Response is an array type
    const [response] = await service.mutateAdGroupExtensionSettings(request, {
      // @ts-expect-error This arg doesn't exist in the type definitions
      otherArgs: {
        headers: this.callHeaders,
      },
    });
    if (this.hooks.onMutationEnd) {
      // The hook may substitute its own resolution value for the return.
      const mutationResolution: HookedResolution = { resolved: false };
      await this.hooks.onMutationEnd({
        ...baseHookArguments,
        response: this.decodePartialFailureError(response),
        resolve: (res) => {
          mutationResolution.resolved = true;
          mutationResolution.res = res;
        },
      });
      if (mutationResolution.resolved) {
        return mutationResolution.res;
      }
    }
    // Decode any partial-failure error embedded in the response.
    return this.decodePartialFailureError(response);
  } catch (err) {
    // Normalise the raw error, surface it to onMutationError, then rethrow.
    const googleAdsError = this.getGoogleAdsError(err);
    if (this.hooks.onMutationError) {
      await this.hooks.onMutationError({
        ...baseHookArguments,
        error: googleAdsError,
      });
    }
    throw googleAdsError;
  }
}
,
      /**
       * Build and send an AdGroupExtensionSettingService "remove" mutate call,
       * running any user-registered mutation hooks around the RPC.
       * @description remove resources of type string
       * @param adGroupExtensionSettings resource names of the entities to remove
       * @param options optional per-call request flags (see MutateOptions)
       * @returns services.MutateAdGroupExtensionSettingsResponse
       */
      remove: async (
        adGroupExtensionSettings: string[] ,
        options?: MutateOptions
      ): Promise<services.MutateAdGroupExtensionSettingsResponse > => {
        // Wrap each resource name in a "remove" operation.
        const ops = this.buildOperations<
          services.AdGroupExtensionSettingOperation,
          string
        >(
          "remove",
          adGroupExtensionSettings
        );
        // Assemble the outgoing mutate request from the operations plus caller options.
        const request = this.buildRequest<
          services.AdGroupExtensionSettingOperation,
          services.IMutateAdGroupExtensionSettingsRequest,
          MutateOptions
        >(ops, options);
        // Shared context handed to every mutation hook invocation below.
        const baseHookArguments: BaseMutationHookArgs = {
          credentials: this.credentials,
          method: "AdGroupExtensionSettingService.mutateAdGroupExtensionSettings",
          mutation: request,
          isServiceCall: true,
        };
        // onMutationStart may cancel the call (supplying its own response) or edit request options.
        if (this.hooks.onMutationStart) {
          const mutationCancellation: HookedCancellation = { cancelled: false };
          await this.hooks.onMutationStart({
            ...baseHookArguments,
            cancel: (res) => {
              mutationCancellation.cancelled = true;
              mutationCancellation.res = res;
            },
            // Copies hook-edited option key/values onto the request in place.
            editOptions: (options) => {
              Object.entries(options).forEach(([key, val]) => {
                // @ts-expect-error Index with key type is fine
                request[key] = val;
              });
            },
          });
          if (mutationCancellation.cancelled) {
            return mutationCancellation.res;
          }
        }
        try {
          // Fire the RPC; callHeaders ride along as extra call metadata (presumably auth — confirm).
          // @ts-expect-error Response is an array type
          const [response] = await service.mutateAdGroupExtensionSettings(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          // onMutationEnd can observe the decoded response and substitute its own result.
          if (this.hooks.onMutationEnd) {
            const mutationResolution: HookedResolution = { resolved: false };
            await this.hooks.onMutationEnd({
              ...baseHookArguments,
              response: this.decodePartialFailureError(response),
              resolve: (res) => {
                mutationResolution.resolved = true;
                mutationResolution.res = res;
              },
            });
            if (mutationResolution.resolved) {
              return mutationResolution.res;
            }
          }
          // Decode any partial-failure payload before handing the response back.
          return this.decodePartialFailureError(response);
        } catch (err) {
          // Normalize to a GoogleAdsError, give the error hook a look, then rethrow.
          const googleAdsError = this.getGoogleAdsError(err);
          if (this.hooks.onMutationError) {
            await this.hooks.onMutationError({
              ...baseHookArguments,
              error: googleAdsError,
            });
          }
          throw googleAdsError;
        }
      }
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/AdGroupFeedService
*/
  public get adGroupFeeds() {
    // Client stub for AdGroupFeedService, resolved on each property access.
    const service = this.loadService<services.AdGroupFeedService>("AdGroupFeedServiceClient")
    // Per-call request flags a caller may set when mutating.
    type MutateOptions = Partial<Pick<services.IMutateAdGroupFeedsRequest, "partial_failure"|"validate_only"|"response_content_type">>
    return {
      /**
       * @description Retrieve a resources.AdGroupFeed in full detail
       * @warning Don't use get in production!
       * @param resourceName fully-qualified resource name of the entity
       * @returns resources.AdGroupFeed
       */
      get: async (resourceName: string): Promise<resources.AdGroupFeed> => {
        const request = new services.GetAdGroupFeedRequest({
          resource_name: resourceName,
        });
        try {
          // @ts-expect-error Response is an array type
          const [response] = await service.getAdGroupFeed(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          return response;
        } catch (err) {
          // Normalize transport errors into a GoogleAdsError before rethrowing.
          throw this.getGoogleAdsError(err);
        }
      }
      ,
      /**
       * @description create resources of type resources.IAdGroupFeed
       * @param adGroupFeeds entities to create (plain interfaces or message instances)
       * @param options optional per-call request flags (see MutateOptions)
       * @returns services.MutateAdGroupFeedsResponse
       */
      create: async (
        adGroupFeeds: (resources.IAdGroupFeed | resources.AdGroupFeed)[] ,
        options?: MutateOptions
      ): Promise<services.MutateAdGroupFeedsResponse > => {
        // Wrap each input entity in a "create" operation.
        const ops = this.buildOperations<
          services.AdGroupFeedOperation,
          resources.IAdGroupFeed
        >(
          "create",
          adGroupFeeds
        );
        // Assemble the outgoing mutate request from the operations plus caller options.
        const request = this.buildRequest<
          services.AdGroupFeedOperation,
          services.IMutateAdGroupFeedsRequest,
          MutateOptions
        >(ops, options);
        // Shared context handed to every mutation hook invocation below.
        const baseHookArguments: BaseMutationHookArgs = {
          credentials: this.credentials,
          method: "AdGroupFeedService.mutateAdGroupFeeds",
          mutation: request,
          isServiceCall: true,
        };
        // onMutationStart may cancel the call (supplying its own response) or edit request options.
        if (this.hooks.onMutationStart) {
          const mutationCancellation: HookedCancellation = { cancelled: false };
          await this.hooks.onMutationStart({
            ...baseHookArguments,
            cancel: (res) => {
              mutationCancellation.cancelled = true;
              mutationCancellation.res = res;
            },
            // Copies hook-edited option key/values onto the request in place.
            editOptions: (options) => {
              Object.entries(options).forEach(([key, val]) => {
                // @ts-expect-error Index with key type is fine
                request[key] = val;
              });
            },
          });
          if (mutationCancellation.cancelled) {
            return mutationCancellation.res;
          }
        }
        try {
          // Fire the RPC; callHeaders ride along as extra call metadata (presumably auth — confirm).
          // @ts-expect-error Response is an array type
          const [response] = await service.mutateAdGroupFeeds(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          // onMutationEnd can observe the decoded response and substitute its own result.
          if (this.hooks.onMutationEnd) {
            const mutationResolution: HookedResolution = { resolved: false };
            await this.hooks.onMutationEnd({
              ...baseHookArguments,
              response: this.decodePartialFailureError(response),
              resolve: (res) => {
                mutationResolution.resolved = true;
                mutationResolution.res = res;
              },
            });
            if (mutationResolution.resolved) {
              return mutationResolution.res;
            }
          }
          // Decode any partial-failure payload before handing the response back.
          return this.decodePartialFailureError(response);
        } catch (err) {
          // Normalize to a GoogleAdsError, give the error hook a look, then rethrow.
          const googleAdsError = this.getGoogleAdsError(err);
          if (this.hooks.onMutationError) {
            await this.hooks.onMutationError({
              ...baseHookArguments,
              error: googleAdsError,
            });
          }
          throw googleAdsError;
        }
      }
      ,
      /**
       * @description update resources of type resources.IAdGroupFeed
       * @param adGroupFeeds entities to update (plain interfaces or message instances)
       * @param options optional per-call request flags (see MutateOptions)
       * @returns services.MutateAdGroupFeedsResponse
       */
      update: async (
        adGroupFeeds: (resources.IAdGroupFeed | resources.AdGroupFeed)[] ,
        options?: MutateOptions
      ): Promise<services.MutateAdGroupFeedsResponse > => {
        // Wrap each input entity in an "update" operation.
        const ops = this.buildOperations<
          services.AdGroupFeedOperation,
          resources.IAdGroupFeed
        >(
          "update",
          adGroupFeeds
          // @ts-expect-error Static class type here is fine
          , resources.AdGroupFeed
        );
        // Assemble the outgoing mutate request from the operations plus caller options.
        const request = this.buildRequest<
          services.AdGroupFeedOperation,
          services.IMutateAdGroupFeedsRequest,
          MutateOptions
        >(ops, options);
        // Shared context handed to every mutation hook invocation below.
        const baseHookArguments: BaseMutationHookArgs = {
          credentials: this.credentials,
          method: "AdGroupFeedService.mutateAdGroupFeeds",
          mutation: request,
          isServiceCall: true,
        };
        // onMutationStart may cancel the call (supplying its own response) or edit request options.
        if (this.hooks.onMutationStart) {
          const mutationCancellation: HookedCancellation = { cancelled: false };
          await this.hooks.onMutationStart({
            ...baseHookArguments,
            cancel: (res) => {
              mutationCancellation.cancelled = true;
              mutationCancellation.res = res;
            },
            // Copies hook-edited option key/values onto the request in place.
            editOptions: (options) => {
              Object.entries(options).forEach(([key, val]) => {
                // @ts-expect-error Index with key type is fine
                request[key] = val;
              });
            },
          });
          if (mutationCancellation.cancelled) {
            return mutationCancellation.res;
          }
        }
        try {
          // Fire the RPC; callHeaders ride along as extra call metadata (presumably auth — confirm).
          // @ts-expect-error Response is an array type
          const [response] = await service.mutateAdGroupFeeds(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          // onMutationEnd can observe the decoded response and substitute its own result.
          if (this.hooks.onMutationEnd) {
            const mutationResolution: HookedResolution = { resolved: false };
            await this.hooks.onMutationEnd({
              ...baseHookArguments,
              response: this.decodePartialFailureError(response),
              resolve: (res) => {
                mutationResolution.resolved = true;
                mutationResolution.res = res;
              },
            });
            if (mutationResolution.resolved) {
              return mutationResolution.res;
            }
          }
          // Decode any partial-failure payload before handing the response back.
          return this.decodePartialFailureError(response);
        } catch (err) {
          // Normalize to a GoogleAdsError, give the error hook a look, then rethrow.
          const googleAdsError = this.getGoogleAdsError(err);
          if (this.hooks.onMutationError) {
            await this.hooks.onMutationError({
              ...baseHookArguments,
              error: googleAdsError,
            });
          }
          throw googleAdsError;
        }
      }
      ,
      /**
       * @description remove resources of type string
       * @param adGroupFeeds resource names of the entities to remove
       * @param options optional per-call request flags (see MutateOptions)
       * @returns services.MutateAdGroupFeedsResponse
       */
      remove: async (
        adGroupFeeds: string[] ,
        options?: MutateOptions
      ): Promise<services.MutateAdGroupFeedsResponse > => {
        // Wrap each resource name in a "remove" operation.
        const ops = this.buildOperations<
          services.AdGroupFeedOperation,
          string
        >(
          "remove",
          adGroupFeeds
        );
        // Assemble the outgoing mutate request from the operations plus caller options.
        const request = this.buildRequest<
          services.AdGroupFeedOperation,
          services.IMutateAdGroupFeedsRequest,
          MutateOptions
        >(ops, options);
        // Shared context handed to every mutation hook invocation below.
        const baseHookArguments: BaseMutationHookArgs = {
          credentials: this.credentials,
          method: "AdGroupFeedService.mutateAdGroupFeeds",
          mutation: request,
          isServiceCall: true,
        };
        // onMutationStart may cancel the call (supplying its own response) or edit request options.
        if (this.hooks.onMutationStart) {
          const mutationCancellation: HookedCancellation = { cancelled: false };
          await this.hooks.onMutationStart({
            ...baseHookArguments,
            cancel: (res) => {
              mutationCancellation.cancelled = true;
              mutationCancellation.res = res;
            },
            // Copies hook-edited option key/values onto the request in place.
            editOptions: (options) => {
              Object.entries(options).forEach(([key, val]) => {
                // @ts-expect-error Index with key type is fine
                request[key] = val;
              });
            },
          });
          if (mutationCancellation.cancelled) {
            return mutationCancellation.res;
          }
        }
        try {
          // Fire the RPC; callHeaders ride along as extra call metadata (presumably auth — confirm).
          // @ts-expect-error Response is an array type
          const [response] = await service.mutateAdGroupFeeds(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          // onMutationEnd can observe the decoded response and substitute its own result.
          if (this.hooks.onMutationEnd) {
            const mutationResolution: HookedResolution = { resolved: false };
            await this.hooks.onMutationEnd({
              ...baseHookArguments,
              response: this.decodePartialFailureError(response),
              resolve: (res) => {
                mutationResolution.resolved = true;
                mutationResolution.res = res;
              },
            });
            if (mutationResolution.resolved) {
              return mutationResolution.res;
            }
          }
          // Decode any partial-failure payload before handing the response back.
          return this.decodePartialFailureError(response);
        } catch (err) {
          // Normalize to a GoogleAdsError, give the error hook a look, then rethrow.
          const googleAdsError = this.getGoogleAdsError(err);
          if (this.hooks.onMutationError) {
            await this.hooks.onMutationError({
              ...baseHookArguments,
              error: googleAdsError,
            });
          }
          throw googleAdsError;
        }
      }
    }
  }
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/AdGroupLabelService
*/
  public get adGroupLabels() {
    // Client stub for AdGroupLabelService, resolved on each property access.
    const service = this.loadService<services.AdGroupLabelService>("AdGroupLabelServiceClient")
    // Per-call request flags a caller may set when mutating.
    // NOTE: labels expose only create/remove — no update operation here.
    type MutateOptions = Partial<Pick<services.IMutateAdGroupLabelsRequest, "partial_failure"|"validate_only">>
    return {
      /**
       * @description Retrieve a resources.AdGroupLabel in full detail
       * @warning Don't use get in production!
       * @param resourceName fully-qualified resource name of the entity
       * @returns resources.AdGroupLabel
       */
      get: async (resourceName: string): Promise<resources.AdGroupLabel> => {
        const request = new services.GetAdGroupLabelRequest({
          resource_name: resourceName,
        });
        try {
          // @ts-expect-error Response is an array type
          const [response] = await service.getAdGroupLabel(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          return response;
        } catch (err) {
          // Normalize transport errors into a GoogleAdsError before rethrowing.
          throw this.getGoogleAdsError(err);
        }
      }
      ,
      /**
       * @description create resources of type resources.IAdGroupLabel
       * @param adGroupLabels entities to create (plain interfaces or message instances)
       * @param options optional per-call request flags (see MutateOptions)
       * @returns services.MutateAdGroupLabelsResponse
       */
      create: async (
        adGroupLabels: (resources.IAdGroupLabel | resources.AdGroupLabel)[] ,
        options?: MutateOptions
      ): Promise<services.MutateAdGroupLabelsResponse > => {
        // Wrap each input entity in a "create" operation.
        const ops = this.buildOperations<
          services.AdGroupLabelOperation,
          resources.IAdGroupLabel
        >(
          "create",
          adGroupLabels
        );
        // Assemble the outgoing mutate request from the operations plus caller options.
        const request = this.buildRequest<
          services.AdGroupLabelOperation,
          services.IMutateAdGroupLabelsRequest,
          MutateOptions
        >(ops, options);
        // Shared context handed to every mutation hook invocation below.
        const baseHookArguments: BaseMutationHookArgs = {
          credentials: this.credentials,
          method: "AdGroupLabelService.mutateAdGroupLabels",
          mutation: request,
          isServiceCall: true,
        };
        // onMutationStart may cancel the call (supplying its own response) or edit request options.
        if (this.hooks.onMutationStart) {
          const mutationCancellation: HookedCancellation = { cancelled: false };
          await this.hooks.onMutationStart({
            ...baseHookArguments,
            cancel: (res) => {
              mutationCancellation.cancelled = true;
              mutationCancellation.res = res;
            },
            // Copies hook-edited option key/values onto the request in place.
            editOptions: (options) => {
              Object.entries(options).forEach(([key, val]) => {
                // @ts-expect-error Index with key type is fine
                request[key] = val;
              });
            },
          });
          if (mutationCancellation.cancelled) {
            return mutationCancellation.res;
          }
        }
        try {
          // Fire the RPC; callHeaders ride along as extra call metadata (presumably auth — confirm).
          // @ts-expect-error Response is an array type
          const [response] = await service.mutateAdGroupLabels(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          // onMutationEnd can observe the decoded response and substitute its own result.
          if (this.hooks.onMutationEnd) {
            const mutationResolution: HookedResolution = { resolved: false };
            await this.hooks.onMutationEnd({
              ...baseHookArguments,
              response: this.decodePartialFailureError(response),
              resolve: (res) => {
                mutationResolution.resolved = true;
                mutationResolution.res = res;
              },
            });
            if (mutationResolution.resolved) {
              return mutationResolution.res;
            }
          }
          // Decode any partial-failure payload before handing the response back.
          return this.decodePartialFailureError(response);
        } catch (err) {
          // Normalize to a GoogleAdsError, give the error hook a look, then rethrow.
          const googleAdsError = this.getGoogleAdsError(err);
          if (this.hooks.onMutationError) {
            await this.hooks.onMutationError({
              ...baseHookArguments,
              error: googleAdsError,
            });
          }
          throw googleAdsError;
        }
      }
      ,
      /**
       * @description remove resources of type string
       * @param adGroupLabels resource names of the entities to remove
       * @param options optional per-call request flags (see MutateOptions)
       * @returns services.MutateAdGroupLabelsResponse
       */
      remove: async (
        adGroupLabels: string[] ,
        options?: MutateOptions
      ): Promise<services.MutateAdGroupLabelsResponse > => {
        // Wrap each resource name in a "remove" operation.
        const ops = this.buildOperations<
          services.AdGroupLabelOperation,
          string
        >(
          "remove",
          adGroupLabels
        );
        // Assemble the outgoing mutate request from the operations plus caller options.
        const request = this.buildRequest<
          services.AdGroupLabelOperation,
          services.IMutateAdGroupLabelsRequest,
          MutateOptions
        >(ops, options);
        // Shared context handed to every mutation hook invocation below.
        const baseHookArguments: BaseMutationHookArgs = {
          credentials: this.credentials,
          method: "AdGroupLabelService.mutateAdGroupLabels",
          mutation: request,
          isServiceCall: true,
        };
        // onMutationStart may cancel the call (supplying its own response) or edit request options.
        if (this.hooks.onMutationStart) {
          const mutationCancellation: HookedCancellation = { cancelled: false };
          await this.hooks.onMutationStart({
            ...baseHookArguments,
            cancel: (res) => {
              mutationCancellation.cancelled = true;
              mutationCancellation.res = res;
            },
            // Copies hook-edited option key/values onto the request in place.
            editOptions: (options) => {
              Object.entries(options).forEach(([key, val]) => {
                // @ts-expect-error Index with key type is fine
                request[key] = val;
              });
            },
          });
          if (mutationCancellation.cancelled) {
            return mutationCancellation.res;
          }
        }
        try {
          // Fire the RPC; callHeaders ride along as extra call metadata (presumably auth — confirm).
          // @ts-expect-error Response is an array type
          const [response] = await service.mutateAdGroupLabels(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          // onMutationEnd can observe the decoded response and substitute its own result.
          if (this.hooks.onMutationEnd) {
            const mutationResolution: HookedResolution = { resolved: false };
            await this.hooks.onMutationEnd({
              ...baseHookArguments,
              response: this.decodePartialFailureError(response),
              resolve: (res) => {
                mutationResolution.resolved = true;
                mutationResolution.res = res;
              },
            });
            if (mutationResolution.resolved) {
              return mutationResolution.res;
            }
          }
          // Decode any partial-failure payload before handing the response back.
          return this.decodePartialFailureError(response);
        } catch (err) {
          // Normalize to a GoogleAdsError, give the error hook a look, then rethrow.
          const googleAdsError = this.getGoogleAdsError(err);
          if (this.hooks.onMutationError) {
            await this.hooks.onMutationError({
              ...baseHookArguments,
              error: googleAdsError,
            });
          }
          throw googleAdsError;
        }
      }
    }
  }
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/AdGroupService
*/
  public get adGroups() {
    // Client stub for AdGroupService, resolved on each property access.
    const service = this.loadService<services.AdGroupService>("AdGroupServiceClient")
    // Per-call request flags a caller may set when mutating.
    type MutateOptions = Partial<Pick<services.IMutateAdGroupsRequest, "partial_failure"|"validate_only"|"response_content_type">>
    return {
      /**
       * @description Retrieve a resources.AdGroup in full detail
       * @warning Don't use get in production!
       * @param resourceName fully-qualified resource name of the entity
       * @returns resources.AdGroup
       */
      get: async (resourceName: string): Promise<resources.AdGroup> => {
        const request = new services.GetAdGroupRequest({
          resource_name: resourceName,
        });
        try {
          // @ts-expect-error Response is an array type
          const [response] = await service.getAdGroup(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          return response;
        } catch (err) {
          // Normalize transport errors into a GoogleAdsError before rethrowing.
          throw this.getGoogleAdsError(err);
        }
      }
      ,
      /**
       * @description create resources of type resources.IAdGroup
       * @param adGroups entities to create (plain interfaces or message instances)
       * @param options optional per-call request flags (see MutateOptions)
       * @returns services.MutateAdGroupsResponse
       */
      create: async (
        adGroups: (resources.IAdGroup | resources.AdGroup)[] ,
        options?: MutateOptions
      ): Promise<services.MutateAdGroupsResponse > => {
        // Wrap each input entity in a "create" operation.
        const ops = this.buildOperations<
          services.AdGroupOperation,
          resources.IAdGroup
        >(
          "create",
          adGroups
        );
        // Assemble the outgoing mutate request from the operations plus caller options.
        const request = this.buildRequest<
          services.AdGroupOperation,
          services.IMutateAdGroupsRequest,
          MutateOptions
        >(ops, options);
        // Shared context handed to every mutation hook invocation below.
        const baseHookArguments: BaseMutationHookArgs = {
          credentials: this.credentials,
          method: "AdGroupService.mutateAdGroups",
          mutation: request,
          isServiceCall: true,
        };
        // onMutationStart may cancel the call (supplying its own response) or edit request options.
        if (this.hooks.onMutationStart) {
          const mutationCancellation: HookedCancellation = { cancelled: false };
          await this.hooks.onMutationStart({
            ...baseHookArguments,
            cancel: (res) => {
              mutationCancellation.cancelled = true;
              mutationCancellation.res = res;
            },
            // Copies hook-edited option key/values onto the request in place.
            editOptions: (options) => {
              Object.entries(options).forEach(([key, val]) => {
                // @ts-expect-error Index with key type is fine
                request[key] = val;
              });
            },
          });
          if (mutationCancellation.cancelled) {
            return mutationCancellation.res;
          }
        }
        try {
          // Fire the RPC; callHeaders ride along as extra call metadata (presumably auth — confirm).
          // @ts-expect-error Response is an array type
          const [response] = await service.mutateAdGroups(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          // onMutationEnd can observe the decoded response and substitute its own result.
          if (this.hooks.onMutationEnd) {
            const mutationResolution: HookedResolution = { resolved: false };
            await this.hooks.onMutationEnd({
              ...baseHookArguments,
              response: this.decodePartialFailureError(response),
              resolve: (res) => {
                mutationResolution.resolved = true;
                mutationResolution.res = res;
              },
            });
            if (mutationResolution.resolved) {
              return mutationResolution.res;
            }
          }
          // Decode any partial-failure payload before handing the response back.
          return this.decodePartialFailureError(response);
        } catch (err) {
          // Normalize to a GoogleAdsError, give the error hook a look, then rethrow.
          const googleAdsError = this.getGoogleAdsError(err);
          if (this.hooks.onMutationError) {
            await this.hooks.onMutationError({
              ...baseHookArguments,
              error: googleAdsError,
            });
          }
          throw googleAdsError;
        }
      }
      ,
      /**
       * @description update resources of type resources.IAdGroup
       * @param adGroups entities to update (plain interfaces or message instances)
       * @param options optional per-call request flags (see MutateOptions)
       * @returns services.MutateAdGroupsResponse
       */
      update: async (
        adGroups: (resources.IAdGroup | resources.AdGroup)[] ,
        options?: MutateOptions
      ): Promise<services.MutateAdGroupsResponse > => {
        // Wrap each input entity in an "update" operation.
        const ops = this.buildOperations<
          services.AdGroupOperation,
          resources.IAdGroup
        >(
          "update",
          adGroups
          // @ts-expect-error Static class type here is fine
          , resources.AdGroup
        );
        // Assemble the outgoing mutate request from the operations plus caller options.
        const request = this.buildRequest<
          services.AdGroupOperation,
          services.IMutateAdGroupsRequest,
          MutateOptions
        >(ops, options);
        // Shared context handed to every mutation hook invocation below.
        const baseHookArguments: BaseMutationHookArgs = {
          credentials: this.credentials,
          method: "AdGroupService.mutateAdGroups",
          mutation: request,
          isServiceCall: true,
        };
        // onMutationStart may cancel the call (supplying its own response) or edit request options.
        if (this.hooks.onMutationStart) {
          const mutationCancellation: HookedCancellation = { cancelled: false };
          await this.hooks.onMutationStart({
            ...baseHookArguments,
            cancel: (res) => {
              mutationCancellation.cancelled = true;
              mutationCancellation.res = res;
            },
            // Copies hook-edited option key/values onto the request in place.
            editOptions: (options) => {
              Object.entries(options).forEach(([key, val]) => {
                // @ts-expect-error Index with key type is fine
                request[key] = val;
              });
            },
          });
          if (mutationCancellation.cancelled) {
            return mutationCancellation.res;
          }
        }
        try {
          // Fire the RPC; callHeaders ride along as extra call metadata (presumably auth — confirm).
          // @ts-expect-error Response is an array type
          const [response] = await service.mutateAdGroups(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          // onMutationEnd can observe the decoded response and substitute its own result.
          if (this.hooks.onMutationEnd) {
            const mutationResolution: HookedResolution = { resolved: false };
            await this.hooks.onMutationEnd({
              ...baseHookArguments,
              response: this.decodePartialFailureError(response),
              resolve: (res) => {
                mutationResolution.resolved = true;
                mutationResolution.res = res;
              },
            });
            if (mutationResolution.resolved) {
              return mutationResolution.res;
            }
          }
          // Decode any partial-failure payload before handing the response back.
          return this.decodePartialFailureError(response);
        } catch (err) {
          // Normalize to a GoogleAdsError, give the error hook a look, then rethrow.
          const googleAdsError = this.getGoogleAdsError(err);
          if (this.hooks.onMutationError) {
            await this.hooks.onMutationError({
              ...baseHookArguments,
              error: googleAdsError,
            });
          }
          throw googleAdsError;
        }
      }
      ,
      /**
       * @description remove resources of type string
       * @param adGroups resource names of the entities to remove
       * @param options optional per-call request flags (see MutateOptions)
       * @returns services.MutateAdGroupsResponse
       */
      remove: async (
        adGroups: string[] ,
        options?: MutateOptions
      ): Promise<services.MutateAdGroupsResponse > => {
        // Wrap each resource name in a "remove" operation.
        const ops = this.buildOperations<
          services.AdGroupOperation,
          string
        >(
          "remove",
          adGroups
        );
        // Assemble the outgoing mutate request from the operations plus caller options.
        const request = this.buildRequest<
          services.AdGroupOperation,
          services.IMutateAdGroupsRequest,
          MutateOptions
        >(ops, options);
        // Shared context handed to every mutation hook invocation below.
        const baseHookArguments: BaseMutationHookArgs = {
          credentials: this.credentials,
          method: "AdGroupService.mutateAdGroups",
          mutation: request,
          isServiceCall: true,
        };
        // onMutationStart may cancel the call (supplying its own response) or edit request options.
        if (this.hooks.onMutationStart) {
          const mutationCancellation: HookedCancellation = { cancelled: false };
          await this.hooks.onMutationStart({
            ...baseHookArguments,
            cancel: (res) => {
              mutationCancellation.cancelled = true;
              mutationCancellation.res = res;
            },
            // Copies hook-edited option key/values onto the request in place.
            editOptions: (options) => {
              Object.entries(options).forEach(([key, val]) => {
                // @ts-expect-error Index with key type is fine
                request[key] = val;
              });
            },
          });
          if (mutationCancellation.cancelled) {
            return mutationCancellation.res;
          }
        }
        try {
          // Fire the RPC; callHeaders ride along as extra call metadata (presumably auth — confirm).
          // @ts-expect-error Response is an array type
          const [response] = await service.mutateAdGroups(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          // onMutationEnd can observe the decoded response and substitute its own result.
          if (this.hooks.onMutationEnd) {
            const mutationResolution: HookedResolution = { resolved: false };
            await this.hooks.onMutationEnd({
              ...baseHookArguments,
              response: this.decodePartialFailureError(response),
              resolve: (res) => {
                mutationResolution.resolved = true;
                mutationResolution.res = res;
              },
            });
            if (mutationResolution.resolved) {
              return mutationResolution.res;
            }
          }
          // Decode any partial-failure payload before handing the response back.
          return this.decodePartialFailureError(response);
        } catch (err) {
          // Normalize to a GoogleAdsError, give the error hook a look, then rethrow.
          const googleAdsError = this.getGoogleAdsError(err);
          if (this.hooks.onMutationError) {
            await this.hooks.onMutationError({
              ...baseHookArguments,
              error: googleAdsError,
            });
          }
          throw googleAdsError;
        }
      }
    }
  }
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/AdGroupSimulationService
*/
public get adGroupSimulations() {
const service = this.loadService<services.AdGroupSimulationService>("AdGroupSimulationServiceClient")
return {
/**
* @description Retrieve a resources.AdGroupSimulation in full detail
* @warning Don't use get in production!
* @returns resources.AdGroupSimulation
*/
get: async (resourceName: string): Promise<resources.AdGroupSimulation> => {
const request = new services.GetAdGroupSimulationRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getAdGroupSimulation(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/AdParameterService
*/
  public get adParameters() {
    // Client stub for AdParameterService, resolved on each property access.
    const service = this.loadService<services.AdParameterService>("AdParameterServiceClient")
    // Per-call request flags a caller may set when mutating.
    type MutateOptions = Partial<Pick<services.IMutateAdParametersRequest, "partial_failure"|"validate_only"|"response_content_type">>
    return {
      /**
       * @description Retrieve a resources.AdParameter in full detail
       * @warning Don't use get in production!
       * @param resourceName fully-qualified resource name of the entity
       * @returns resources.AdParameter
       */
      get: async (resourceName: string): Promise<resources.AdParameter> => {
        const request = new services.GetAdParameterRequest({
          resource_name: resourceName,
        });
        try {
          // @ts-expect-error Response is an array type
          const [response] = await service.getAdParameter(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          return response;
        } catch (err) {
          // Normalize transport errors into a GoogleAdsError before rethrowing.
          throw this.getGoogleAdsError(err);
        }
      }
      ,
      /**
       * @description create resources of type resources.IAdParameter
       * @param adParameters entities to create (plain interfaces or message instances)
       * @param options optional per-call request flags (see MutateOptions)
       * @returns services.MutateAdParametersResponse
       */
      create: async (
        adParameters: (resources.IAdParameter | resources.AdParameter)[] ,
        options?: MutateOptions
      ): Promise<services.MutateAdParametersResponse > => {
        // Wrap each input entity in a "create" operation.
        const ops = this.buildOperations<
          services.AdParameterOperation,
          resources.IAdParameter
        >(
          "create",
          adParameters
        );
        // Assemble the outgoing mutate request from the operations plus caller options.
        const request = this.buildRequest<
          services.AdParameterOperation,
          services.IMutateAdParametersRequest,
          MutateOptions
        >(ops, options);
        // Shared context handed to every mutation hook invocation below.
        const baseHookArguments: BaseMutationHookArgs = {
          credentials: this.credentials,
          method: "AdParameterService.mutateAdParameters",
          mutation: request,
          isServiceCall: true,
        };
        // onMutationStart may cancel the call (supplying its own response) or edit request options.
        if (this.hooks.onMutationStart) {
          const mutationCancellation: HookedCancellation = { cancelled: false };
          await this.hooks.onMutationStart({
            ...baseHookArguments,
            cancel: (res) => {
              mutationCancellation.cancelled = true;
              mutationCancellation.res = res;
            },
            // Copies hook-edited option key/values onto the request in place.
            editOptions: (options) => {
              Object.entries(options).forEach(([key, val]) => {
                // @ts-expect-error Index with key type is fine
                request[key] = val;
              });
            },
          });
          if (mutationCancellation.cancelled) {
            return mutationCancellation.res;
          }
        }
        try {
          // Fire the RPC; callHeaders ride along as extra call metadata (presumably auth — confirm).
          // @ts-expect-error Response is an array type
          const [response] = await service.mutateAdParameters(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          // onMutationEnd can observe the decoded response and substitute its own result.
          if (this.hooks.onMutationEnd) {
            const mutationResolution: HookedResolution = { resolved: false };
            await this.hooks.onMutationEnd({
              ...baseHookArguments,
              response: this.decodePartialFailureError(response),
              resolve: (res) => {
                mutationResolution.resolved = true;
                mutationResolution.res = res;
              },
            });
            if (mutationResolution.resolved) {
              return mutationResolution.res;
            }
          }
          // Decode any partial-failure payload before handing the response back.
          return this.decodePartialFailureError(response);
        } catch (err) {
          // Normalize to a GoogleAdsError, give the error hook a look, then rethrow.
          const googleAdsError = this.getGoogleAdsError(err);
          if (this.hooks.onMutationError) {
            await this.hooks.onMutationError({
              ...baseHookArguments,
              error: googleAdsError,
            });
          }
          throw googleAdsError;
        }
      }
      ,
      /**
       * @description update resources of type resources.IAdParameter
       * @param adParameters entities to update (plain interfaces or message instances)
       * @param options optional per-call request flags (see MutateOptions)
       * @returns services.MutateAdParametersResponse
       */
      update: async (
        adParameters: (resources.IAdParameter | resources.AdParameter)[] ,
        options?: MutateOptions
      ): Promise<services.MutateAdParametersResponse > => {
        // Wrap each input entity in an "update" operation.
        const ops = this.buildOperations<
          services.AdParameterOperation,
          resources.IAdParameter
        >(
          "update",
          adParameters
          // @ts-expect-error Static class type here is fine
          , resources.AdParameter
        );
        // Assemble the outgoing mutate request from the operations plus caller options.
        const request = this.buildRequest<
          services.AdParameterOperation,
          services.IMutateAdParametersRequest,
          MutateOptions
        >(ops, options);
        // Shared context handed to every mutation hook invocation below.
        const baseHookArguments: BaseMutationHookArgs = {
          credentials: this.credentials,
          method: "AdParameterService.mutateAdParameters",
          mutation: request,
          isServiceCall: true,
        };
        // onMutationStart may cancel the call (supplying its own response) or edit request options.
        if (this.hooks.onMutationStart) {
          const mutationCancellation: HookedCancellation = { cancelled: false };
          await this.hooks.onMutationStart({
            ...baseHookArguments,
            cancel: (res) => {
              mutationCancellation.cancelled = true;
              mutationCancellation.res = res;
            },
            // Copies hook-edited option key/values onto the request in place.
            editOptions: (options) => {
              Object.entries(options).forEach(([key, val]) => {
                // @ts-expect-error Index with key type is fine
                request[key] = val;
              });
            },
          });
          if (mutationCancellation.cancelled) {
            return mutationCancellation.res;
          }
        }
        try {
          // Fire the RPC; callHeaders ride along as extra call metadata (presumably auth — confirm).
          // @ts-expect-error Response is an array type
          const [response] = await service.mutateAdParameters(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          // onMutationEnd can observe the decoded response and substitute its own result.
          if (this.hooks.onMutationEnd) {
            const mutationResolution: HookedResolution = { resolved: false };
            await this.hooks.onMutationEnd({
              ...baseHookArguments,
              response: this.decodePartialFailureError(response),
              resolve: (res) => {
                mutationResolution.resolved = true;
                mutationResolution.res = res;
              },
            });
            if (mutationResolution.resolved) {
              return mutationResolution.res;
            }
          }
          // Decode any partial-failure payload before handing the response back.
          return this.decodePartialFailureError(response);
        } catch (err) {
          // Normalize to a GoogleAdsError, give the error hook a look, then rethrow.
          const googleAdsError = this.getGoogleAdsError(err);
          if (this.hooks.onMutationError) {
            await this.hooks.onMutationError({
              ...baseHookArguments,
              error: googleAdsError,
            });
          }
          throw googleAdsError;
        }
      }
      ,
      /**
       * @description remove resources of type string
       * @param adParameters resource names of the entities to remove
       * @param options optional per-call request flags (see MutateOptions)
       * @returns services.MutateAdParametersResponse
       */
      remove: async (
        adParameters: string[] ,
        options?: MutateOptions
      ): Promise<services.MutateAdParametersResponse > => {
        // Wrap each resource name in a "remove" operation.
        const ops = this.buildOperations<
          services.AdParameterOperation,
          string
        >(
          "remove",
          adParameters
        );
        // Assemble the outgoing mutate request from the operations plus caller options.
        const request = this.buildRequest<
          services.AdParameterOperation,
          services.IMutateAdParametersRequest,
          MutateOptions
        >(ops, options);
        // Shared context handed to every mutation hook invocation below.
        const baseHookArguments: BaseMutationHookArgs = {
          credentials: this.credentials,
          method: "AdParameterService.mutateAdParameters",
          mutation: request,
          isServiceCall: true,
        };
        // onMutationStart may cancel the call (supplying its own response) or edit request options.
        if (this.hooks.onMutationStart) {
          const mutationCancellation: HookedCancellation = { cancelled: false };
          await this.hooks.onMutationStart({
            ...baseHookArguments,
            cancel: (res) => {
              mutationCancellation.cancelled = true;
              mutationCancellation.res = res;
            },
            // Copies hook-edited option key/values onto the request in place.
            editOptions: (options) => {
              Object.entries(options).forEach(([key, val]) => {
                // @ts-expect-error Index with key type is fine
                request[key] = val;
              });
            },
          });
          if (mutationCancellation.cancelled) {
            return mutationCancellation.res;
          }
        }
        try {
          // Fire the RPC; callHeaders ride along as extra call metadata (presumably auth — confirm).
          // @ts-expect-error Response is an array type
          const [response] = await service.mutateAdParameters(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          // onMutationEnd can observe the decoded response and substitute its own result.
          if (this.hooks.onMutationEnd) {
            const mutationResolution: HookedResolution = { resolved: false };
            await this.hooks.onMutationEnd({
              ...baseHookArguments,
              response: this.decodePartialFailureError(response),
              resolve: (res) => {
                mutationResolution.resolved = true;
                mutationResolution.res = res;
              },
            });
            if (mutationResolution.resolved) {
              return mutationResolution.res;
            }
          }
          // Decode any partial-failure payload before handing the response back.
          return this.decodePartialFailureError(response);
        } catch (err) {
          // Normalize to a GoogleAdsError, give the error hook a look, then rethrow.
          const googleAdsError = this.getGoogleAdsError(err);
          if (this.hooks.onMutationError) {
            await this.hooks.onMutationError({
              ...baseHookArguments,
              error: googleAdsError,
            });
          }
          throw googleAdsError;
        }
      }
    }
  }
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/AdScheduleViewService
*/
public get adScheduleViews() {
const service = this.loadService<services.AdScheduleViewService>("AdScheduleViewServiceClient")
return {
/**
* @description Retrieve a resources.AdScheduleView in full detail
* @warning Don't use get in production!
* @returns resources.AdScheduleView
*/
get: async (resourceName: string): Promise<resources.AdScheduleView> => {
const request = new services.GetAdScheduleViewRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getAdScheduleView(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/AdService
*/
  public get ads() {
    // Client stub for AdService, resolved on each property access.
    const service = this.loadService<services.AdService>("AdServiceClient")
    // Per-call request flags a caller may set when mutating.
    // NOTE: ads expose only update here — no create/remove mutation.
    type MutateOptions = Partial<Pick<services.IMutateAdsRequest, "partial_failure"|"response_content_type"|"validate_only">>
    return {
      /**
       * @description Retrieve a resources.Ad in full detail
       * @warning Don't use get in production!
       * @param resourceName fully-qualified resource name of the entity
       * @returns resources.Ad
       */
      get: async (resourceName: string): Promise<resources.Ad> => {
        const request = new services.GetAdRequest({
          resource_name: resourceName,
        });
        try {
          // @ts-expect-error Response is an array type
          const [response] = await service.getAd(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          return response;
        } catch (err) {
          // Normalize transport errors into a GoogleAdsError before rethrowing.
          throw this.getGoogleAdsError(err);
        }
      }
      ,
      /**
       * @description update resources of type resources.IAd
       * @param ads entities to update (plain interfaces or message instances)
       * @param options optional per-call request flags (see MutateOptions)
       * @returns services.MutateAdsResponse
       */
      update: async (
        ads: (resources.IAd | resources.Ad)[] ,
        options?: MutateOptions
      ): Promise<services.MutateAdsResponse > => {
        // Wrap each input entity in an "update" operation.
        const ops = this.buildOperations<
          services.AdOperation,
          resources.IAd
        >(
          "update",
          ads
          // @ts-expect-error Static class type here is fine
          , resources.Ad
        );
        // Assemble the outgoing mutate request from the operations plus caller options.
        const request = this.buildRequest<
          services.AdOperation,
          services.IMutateAdsRequest,
          MutateOptions
        >(ops, options);
        // Shared context handed to every mutation hook invocation below.
        const baseHookArguments: BaseMutationHookArgs = {
          credentials: this.credentials,
          method: "AdService.mutateAds",
          mutation: request,
          isServiceCall: true,
        };
        // onMutationStart may cancel the call (supplying its own response) or edit request options.
        if (this.hooks.onMutationStart) {
          const mutationCancellation: HookedCancellation = { cancelled: false };
          await this.hooks.onMutationStart({
            ...baseHookArguments,
            cancel: (res) => {
              mutationCancellation.cancelled = true;
              mutationCancellation.res = res;
            },
            // Copies hook-edited option key/values onto the request in place.
            editOptions: (options) => {
              Object.entries(options).forEach(([key, val]) => {
                // @ts-expect-error Index with key type is fine
                request[key] = val;
              });
            },
          });
          if (mutationCancellation.cancelled) {
            return mutationCancellation.res;
          }
        }
        try {
          // Fire the RPC; callHeaders ride along as extra call metadata (presumably auth — confirm).
          // @ts-expect-error Response is an array type
          const [response] = await service.mutateAds(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          // onMutationEnd can observe the decoded response and substitute its own result.
          if (this.hooks.onMutationEnd) {
            const mutationResolution: HookedResolution = { resolved: false };
            await this.hooks.onMutationEnd({
              ...baseHookArguments,
              response: this.decodePartialFailureError(response),
              resolve: (res) => {
                mutationResolution.resolved = true;
                mutationResolution.res = res;
              },
            });
            if (mutationResolution.resolved) {
              return mutationResolution.res;
            }
          }
          // Decode any partial-failure payload before handing the response back.
          return this.decodePartialFailureError(response);
        } catch (err) {
          // Normalize to a GoogleAdsError, give the error hook a look, then rethrow.
          const googleAdsError = this.getGoogleAdsError(err);
          if (this.hooks.onMutationError) {
            await this.hooks.onMutationError({
              ...baseHookArguments,
              error: googleAdsError,
            });
          }
          throw googleAdsError;
        }
      }
    }
  }
/**
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/AgeRangeViewService
 */
public get ageRangeViews() {
const client = this.loadService<services.AgeRangeViewService>("AgeRangeViewServiceClient")
return {
/**
 * @description Retrieve a resources.AgeRangeView in full detail
 * @warning Don't use get in production!
 * @returns resources.AgeRangeView
 */
get: async (resourceName: string): Promise<resources.AgeRangeView> => {
const getRequest = new services.GetAgeRangeViewRequest({ resource_name: resourceName });
try {
// @ts-expect-error Response is an array type
const [view] = await client.getAgeRangeView(getRequest, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: { headers: this.callHeaders },
});
return view;
} catch (err) {
// Re-throw as a normalised GoogleAdsError.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/AssetFieldTypeViewService
 */
public get assetFieldTypeViews() {
const client = this.loadService<services.AssetFieldTypeViewService>("AssetFieldTypeViewServiceClient")
return {
/**
 * @description Retrieve a resources.AssetFieldTypeView in full detail
 * @warning Don't use get in production!
 * @returns resources.AssetFieldTypeView
 */
get: async (resourceName: string): Promise<resources.AssetFieldTypeView> => {
const getRequest = new services.GetAssetFieldTypeViewRequest({ resource_name: resourceName });
try {
// @ts-expect-error Response is an array type
const [view] = await client.getAssetFieldTypeView(getRequest, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: { headers: this.callHeaders },
});
return view;
} catch (err) {
// Re-throw as a normalised GoogleAdsError.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/AssetGroupAssetService
 */
public get assetGroupAssets() {
// Service client is resolved lazily on each property access of this getter.
const service = this.loadService<services.AssetGroupAssetService>("AssetGroupAssetServiceClient")
// Request-level flags the caller may forward onto the mutate request.
type MutateOptions = Partial<Pick<services.IMutateAssetGroupAssetsRequest, "partial_failure"|"validate_only">>
return {
/**
 * @description Retrieve a resources.AssetGroupAsset in full detail
 * @warning Don't use get in production!
 * @returns resources.AssetGroupAsset
 */
get: async (resourceName: string): Promise<resources.AssetGroupAsset> => {
const request = new services.GetAssetGroupAssetRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getAssetGroupAsset(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise the transport error into a GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
 * @description create resources of type resources.IAssetGroupAsset
 * @returns services.MutateAssetGroupAssetsResponse
 */
create: async (
assetGroupAssets: (resources.IAssetGroupAsset | resources.AssetGroupAsset)[] ,
options?: MutateOptions
): Promise<services.MutateAssetGroupAssetsResponse > => {
const ops = this.buildOperations<
services.AssetGroupAssetOperation,
resources.IAssetGroupAsset
>(
"create",
assetGroupAssets
);
const request = this.buildRequest<
services.AssetGroupAssetOperation,
services.IMutateAssetGroupAssetsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "AssetGroupAssetService.mutateAssetGroupAssets",
mutation: request,
isServiceCall: true,
};
// Pre-mutation hook: the caller may cancel the call outright or patch request options in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateAssetGroupAssets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-mutation hook: the caller may substitute its own resolution for the response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Surface any partial-failure details embedded in the response before returning.
return this.decodePartialFailureError(response);
} catch (err) {
// Wrap the transport error and let the error hook observe it before rethrowing.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description update resources of type resources.IAssetGroupAsset
 * @returns services.MutateAssetGroupAssetsResponse
 */
update: async (
assetGroupAssets: (resources.IAssetGroupAsset | resources.AssetGroupAsset)[] ,
options?: MutateOptions
): Promise<services.MutateAssetGroupAssetsResponse > => {
const ops = this.buildOperations<
services.AssetGroupAssetOperation,
resources.IAssetGroupAsset
>(
"update",
assetGroupAssets
// @ts-expect-error Static class type here is fine
, resources.AssetGroupAsset
);
const request = this.buildRequest<
services.AssetGroupAssetOperation,
services.IMutateAssetGroupAssetsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "AssetGroupAssetService.mutateAssetGroupAssets",
mutation: request,
isServiceCall: true,
};
// Same hook flow as create: optional cancellation/option-editing before the RPC.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateAssetGroupAssets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description remove resources of type string
 * @returns services.MutateAssetGroupAssetsResponse
 */
remove: async (
assetGroupAssets: string[] ,
options?: MutateOptions
): Promise<services.MutateAssetGroupAssetsResponse > => {
const ops = this.buildOperations<
services.AssetGroupAssetOperation,
string
>(
"remove",
assetGroupAssets
);
const request = this.buildRequest<
services.AssetGroupAssetOperation,
services.IMutateAssetGroupAssetsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "AssetGroupAssetService.mutateAssetGroupAssets",
mutation: request,
isServiceCall: true,
};
// Same hook flow as create: optional cancellation/option-editing before the RPC.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateAssetGroupAssets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/AssetGroupListingGroupFilterService
 */
public get assetGroupListingGroupFilters() {
// Service client is resolved lazily on each property access of this getter.
const service = this.loadService<services.AssetGroupListingGroupFilterService>("AssetGroupListingGroupFilterServiceClient")
// Request-level flags the caller may forward; note this service has no partial_failure option.
type MutateOptions = Partial<Pick<services.IMutateAssetGroupListingGroupFiltersRequest, "validate_only"|"response_content_type">>
return {
/**
 * @description create resources of type resources.IAssetGroupListingGroupFilter
 * @returns services.MutateAssetGroupListingGroupFiltersResponse
 */
create: async (
assetGroupListingGroupFilters: (resources.IAssetGroupListingGroupFilter | resources.AssetGroupListingGroupFilter)[] ,
options?: MutateOptions
): Promise<services.MutateAssetGroupListingGroupFiltersResponse > => {
const ops = this.buildOperations<
services.AssetGroupListingGroupFilterOperation,
resources.IAssetGroupListingGroupFilter
>(
"create",
assetGroupListingGroupFilters
);
const request = this.buildRequest<
services.AssetGroupListingGroupFilterOperation,
services.IMutateAssetGroupListingGroupFiltersRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "AssetGroupListingGroupFilterService.mutateAssetGroupListingGroupFilters",
mutation: request,
isServiceCall: true,
};
// Pre-mutation hook: the caller may cancel the call outright or patch request options in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateAssetGroupListingGroupFilters(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-mutation hook: the caller may substitute its own resolution for the response.
// No partial-failure decoding here — the request type does not support partial_failure.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response,
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return response;
} catch (err) {
// Wrap the transport error and let the error hook observe it before rethrowing.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description update resources of type resources.IAssetGroupListingGroupFilter
 * @returns services.MutateAssetGroupListingGroupFiltersResponse
 */
update: async (
assetGroupListingGroupFilters: (resources.IAssetGroupListingGroupFilter | resources.AssetGroupListingGroupFilter)[] ,
options?: MutateOptions
): Promise<services.MutateAssetGroupListingGroupFiltersResponse > => {
const ops = this.buildOperations<
services.AssetGroupListingGroupFilterOperation,
resources.IAssetGroupListingGroupFilter
>(
"update",
assetGroupListingGroupFilters
// @ts-expect-error Static class type here is fine
, resources.AssetGroupListingGroupFilter
);
const request = this.buildRequest<
services.AssetGroupListingGroupFilterOperation,
services.IMutateAssetGroupListingGroupFiltersRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "AssetGroupListingGroupFilterService.mutateAssetGroupListingGroupFilters",
mutation: request,
isServiceCall: true,
};
// Same hook flow as create: optional cancellation/option-editing before the RPC.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateAssetGroupListingGroupFilters(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response,
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return response;
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description remove resources of type string
 * @returns services.MutateAssetGroupListingGroupFiltersResponse
 */
remove: async (
assetGroupListingGroupFilters: string[] ,
options?: MutateOptions
): Promise<services.MutateAssetGroupListingGroupFiltersResponse > => {
const ops = this.buildOperations<
services.AssetGroupListingGroupFilterOperation,
string
>(
"remove",
assetGroupListingGroupFilters
);
const request = this.buildRequest<
services.AssetGroupListingGroupFilterOperation,
services.IMutateAssetGroupListingGroupFiltersRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "AssetGroupListingGroupFilterService.mutateAssetGroupListingGroupFilters",
mutation: request,
isServiceCall: true,
};
// Same hook flow as create: optional cancellation/option-editing before the RPC.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateAssetGroupListingGroupFilters(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response,
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return response;
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/AssetGroupService
 */
public get assetGroups() {
// Service client is resolved lazily on each property access of this getter.
const service = this.loadService<services.AssetGroupService>("AssetGroupServiceClient")
// Request-level flags the caller may forward; note this service has no partial_failure option.
type MutateOptions = Partial<Pick<services.IMutateAssetGroupsRequest, "validate_only">>
return {
/**
 * @description Retrieve a resources.AssetGroup in full detail
 * @warning Don't use get in production!
 * @returns resources.AssetGroup
 */
get: async (resourceName: string): Promise<resources.AssetGroup> => {
const request = new services.GetAssetGroupRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getAssetGroup(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise the transport error into a GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
 * @description create resources of type resources.IAssetGroup
 * @returns services.MutateAssetGroupsResponse
 */
create: async (
assetGroups: (resources.IAssetGroup | resources.AssetGroup)[] ,
options?: MutateOptions
): Promise<services.MutateAssetGroupsResponse > => {
const ops = this.buildOperations<
services.AssetGroupOperation,
resources.IAssetGroup
>(
"create",
assetGroups
);
const request = this.buildRequest<
services.AssetGroupOperation,
services.IMutateAssetGroupsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "AssetGroupService.mutateAssetGroups",
mutation: request,
isServiceCall: true,
};
// Pre-mutation hook: the caller may cancel the call outright or patch request options in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateAssetGroups(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-mutation hook: the caller may substitute its own resolution for the response.
// No partial-failure decoding here — the request type does not support partial_failure.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response,
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return response;
} catch (err) {
// Wrap the transport error and let the error hook observe it before rethrowing.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description update resources of type resources.IAssetGroup
 * @returns services.MutateAssetGroupsResponse
 */
update: async (
assetGroups: (resources.IAssetGroup | resources.AssetGroup)[] ,
options?: MutateOptions
): Promise<services.MutateAssetGroupsResponse > => {
const ops = this.buildOperations<
services.AssetGroupOperation,
resources.IAssetGroup
>(
"update",
assetGroups
// @ts-expect-error Static class type here is fine
, resources.AssetGroup
);
const request = this.buildRequest<
services.AssetGroupOperation,
services.IMutateAssetGroupsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "AssetGroupService.mutateAssetGroups",
mutation: request,
isServiceCall: true,
};
// Same hook flow as create: optional cancellation/option-editing before the RPC.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateAssetGroups(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response,
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return response;
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description remove resources of type string
 * @returns services.MutateAssetGroupsResponse
 */
remove: async (
assetGroups: string[] ,
options?: MutateOptions
): Promise<services.MutateAssetGroupsResponse > => {
const ops = this.buildOperations<
services.AssetGroupOperation,
string
>(
"remove",
assetGroups
);
const request = this.buildRequest<
services.AssetGroupOperation,
services.IMutateAssetGroupsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "AssetGroupService.mutateAssetGroups",
mutation: request,
isServiceCall: true,
};
// Same hook flow as create: optional cancellation/option-editing before the RPC.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateAssetGroups(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response,
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return response;
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/AssetService
 */
public get assets() {
// Service client is resolved lazily on each property access of this getter.
const service = this.loadService<services.AssetService>("AssetServiceClient")
// Request-level flags the caller may forward onto the mutate request.
type MutateOptions = Partial<Pick<services.IMutateAssetsRequest, "partial_failure"|"response_content_type"|"validate_only">>
return {
/**
 * @description Retrieve a resources.Asset in full detail
 * @warning Don't use get in production!
 * @returns resources.Asset
 */
get: async (resourceName: string): Promise<resources.Asset> => {
const request = new services.GetAssetRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getAsset(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise the transport error into a GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
 * @description create resources of type resources.IAsset
 * @returns services.MutateAssetsResponse
 */
create: async (
assets: (resources.IAsset | resources.Asset)[] ,
options?: MutateOptions
): Promise<services.MutateAssetsResponse > => {
const ops = this.buildOperations<
services.AssetOperation,
resources.IAsset
>(
"create",
assets
);
const request = this.buildRequest<
services.AssetOperation,
services.IMutateAssetsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "AssetService.mutateAssets",
mutation: request,
isServiceCall: true,
};
// Pre-mutation hook: the caller may cancel the call outright or patch request options in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateAssets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-mutation hook: the caller may substitute its own resolution for the response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Surface any partial-failure details embedded in the response before returning.
return this.decodePartialFailureError(response);
} catch (err) {
// Wrap the transport error and let the error hook observe it before rethrowing.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description update resources of type resources.IAsset
 * @returns services.MutateAssetsResponse
 */
update: async (
assets: (resources.IAsset | resources.Asset)[] ,
options?: MutateOptions
): Promise<services.MutateAssetsResponse > => {
const ops = this.buildOperations<
services.AssetOperation,
resources.IAsset
>(
"update",
assets
// @ts-expect-error Static class type here is fine
, resources.Asset
);
const request = this.buildRequest<
services.AssetOperation,
services.IMutateAssetsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "AssetService.mutateAssets",
mutation: request,
isServiceCall: true,
};
// Same hook flow as create: optional cancellation/option-editing before the RPC.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateAssets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/AssetSetAssetService
 */
public get assetSetAssets() {
// Service client is resolved lazily on each property access of this getter.
const service = this.loadService<services.AssetSetAssetService>("AssetSetAssetServiceClient")
// Request-level flags the caller may forward onto the mutate request.
type MutateOptions = Partial<Pick<services.IMutateAssetSetAssetsRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
 * @description create resources of type resources.IAssetSetAsset
 * @returns services.MutateAssetSetAssetsResponse
 */
create: async (
assetSetAssets: (resources.IAssetSetAsset | resources.AssetSetAsset)[] ,
options?: MutateOptions
): Promise<services.MutateAssetSetAssetsResponse > => {
const ops = this.buildOperations<
services.AssetSetAssetOperation,
resources.IAssetSetAsset
>(
"create",
assetSetAssets
);
const request = this.buildRequest<
services.AssetSetAssetOperation,
services.IMutateAssetSetAssetsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "AssetSetAssetService.mutateAssetSetAssets",
mutation: request,
isServiceCall: true,
};
// Pre-mutation hook: the caller may cancel the call outright or patch request options in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateAssetSetAssets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-mutation hook: the caller may substitute its own resolution for the response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Surface any partial-failure details embedded in the response before returning.
return this.decodePartialFailureError(response);
} catch (err) {
// Wrap the transport error and let the error hook observe it before rethrowing.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description remove resources of type string
 * @returns services.MutateAssetSetAssetsResponse
 */
remove: async (
assetSetAssets: string[] ,
options?: MutateOptions
): Promise<services.MutateAssetSetAssetsResponse > => {
const ops = this.buildOperations<
services.AssetSetAssetOperation,
string
>(
"remove",
assetSetAssets
);
const request = this.buildRequest<
services.AssetSetAssetOperation,
services.IMutateAssetSetAssetsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "AssetSetAssetService.mutateAssetSetAssets",
mutation: request,
isServiceCall: true,
};
// Same hook flow as create: optional cancellation/option-editing before the RPC.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateAssetSetAssets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/AssetSetService
*/
public get assetSets() {
// Lazily instantiate the AssetSetService gRPC client on each property access.
const service = this.loadService<services.AssetSetService>("AssetSetServiceClient")
// Request-level options callers may set on any mutate call for this service.
type MutateOptions = Partial<Pick<services.IMutateAssetSetsRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
 * @description create resources of type resources.IAssetSet
 * @returns services.MutateAssetSetsResponse
 */
create: async (
assetSets: (resources.IAssetSet | resources.AssetSet)[] ,
options?: MutateOptions
): Promise<services.MutateAssetSetsResponse > => {
// Wrap each resource in a "create" operation.
const ops = this.buildOperations<
services.AssetSetOperation,
resources.IAssetSet
>(
"create",
assetSets
);
const request = this.buildRequest<
services.AssetSetOperation,
services.IMutateAssetSetsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "AssetSetService.mutateAssetSets",
mutation: request,
isServiceCall: true,
};
// Give the onMutationStart hook a chance to cancel or edit the request.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateAssetSets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Let the onMutationEnd hook inspect (and optionally replace) the result.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Surface partial-failure errors embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the error and notify the onMutationError hook before rethrowing.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description update resources of type resources.IAssetSet
 * @returns services.MutateAssetSetsResponse
 */
update: async (
assetSets: (resources.IAssetSet | resources.AssetSet)[] ,
options?: MutateOptions
): Promise<services.MutateAssetSetsResponse > => {
// Wrap each resource in an "update" operation; the static class is passed so
// buildOperations can derive the field mask for partial updates.
const ops = this.buildOperations<
services.AssetSetOperation,
resources.IAssetSet
>(
"update",
assetSets
// @ts-expect-error Static class type here is fine
, resources.AssetSet
);
const request = this.buildRequest<
services.AssetSetOperation,
services.IMutateAssetSetsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "AssetSetService.mutateAssetSets",
mutation: request,
isServiceCall: true,
};
// Give the onMutationStart hook a chance to cancel or edit the request.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateAssetSets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Let the onMutationEnd hook inspect (and optionally replace) the result.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Surface partial-failure errors embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the error and notify the onMutationError hook before rethrowing.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description remove resources of type string
 * @returns services.MutateAssetSetsResponse
 */
remove: async (
assetSets: string[] ,
options?: MutateOptions
): Promise<services.MutateAssetSetsResponse > => {
// Wrap each resource name in a "remove" operation.
const ops = this.buildOperations<
services.AssetSetOperation,
string
>(
"remove",
assetSets
);
const request = this.buildRequest<
services.AssetSetOperation,
services.IMutateAssetSetsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "AssetSetService.mutateAssetSets",
mutation: request,
isServiceCall: true,
};
// Give the onMutationStart hook a chance to cancel or edit the request.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateAssetSets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Let the onMutationEnd hook inspect (and optionally replace) the result.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Surface partial-failure errors embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the error and notify the onMutationError hook before rethrowing.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/BatchJobService
*/
public get batchJobs() {
// Lazily instantiate the BatchJobService gRPC client on each property access.
const service = this.loadService<services.BatchJobService>("BatchJobServiceClient")
// MutateBatchJob takes no request-level options (no partial_failure/validate_only).
type MutateOptions = never
return {
/**
 * @description create resources of type resources.IBatchJob
 * @returns services.MutateBatchJobResponse
 */
create: async (
batchJobs: (resources.IBatchJob | resources.BatchJob)[] ,
options?: MutateOptions
): Promise<services.MutateBatchJobResponse > => {
// Wrap each resource in a "create" operation.
const ops = this.buildOperations<
services.BatchJobOperation,
resources.IBatchJob
>(
"create",
batchJobs
);
const request = this.buildRequest<
services.BatchJobOperation,
services.IMutateBatchJobRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "BatchJobService.mutateBatchJob",
mutation: request,
isServiceCall: true,
};
// Give the onMutationStart hook a chance to cancel or edit the request.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateBatchJob(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Let the onMutationEnd hook inspect (and optionally replace) the result.
// Note: no partial-failure decoding here — batch job mutates don't support it.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response,
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return response;
} catch (err) {
// Normalize the error and notify the onMutationError hook before rethrowing.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description Retrieve a resources.BatchJob in full detail
 * @warning Don't use get in production!
 * @returns resources.BatchJob
 */
get: async (resourceName: string): Promise<resources.BatchJob> => {
const request = new services.GetBatchJobRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getBatchJob(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/BatchJobService#listbatchjobresults
 */
// Thin passthrough: forwards the request with auth headers, normalizing errors.
listBatchJobResults: async (request: services.ListBatchJobResultsRequest): Promise<services.ListBatchJobResultsResponse> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.listBatchJobResults(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/BatchJobService#runbatchjob
 */
// Kicks off asynchronous execution of a batch job; returns a long-running operation.
runBatchJob: async (request: services.RunBatchJobRequest): Promise<longrunning.Operation> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.runBatchJob(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/BatchJobService#addbatchjoboperations
 */
// Appends mutate operations to an existing batch job.
addBatchJobOperations: async (request: services.AddBatchJobOperationsRequest): Promise<services.AddBatchJobOperationsResponse> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.addBatchJobOperations(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/BiddingDataExclusionService
*/
public get biddingDataExclusions() {
// Lazily instantiate the BiddingDataExclusionService gRPC client.
const service = this.loadService<services.BiddingDataExclusionService>("BiddingDataExclusionServiceClient")
// Request-level options callers may set on any mutate call for this service.
type MutateOptions = Partial<Pick<services.IMutateBiddingDataExclusionsRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
 * @description Retrieve a resources.BiddingDataExclusion in full detail
 * @warning Don't use get in production!
 * @returns resources.BiddingDataExclusion
 */
get: async (resourceName: string): Promise<resources.BiddingDataExclusion> => {
const request = new services.GetBiddingDataExclusionRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getBiddingDataExclusion(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
 * @description create resources of type resources.IBiddingDataExclusion
 * @returns services.MutateBiddingDataExclusionsResponse
 */
create: async (
biddingDataExclusions: (resources.IBiddingDataExclusion | resources.BiddingDataExclusion)[] ,
options?: MutateOptions
): Promise<services.MutateBiddingDataExclusionsResponse > => {
// Wrap each resource in a "create" operation.
const ops = this.buildOperations<
services.BiddingDataExclusionOperation,
resources.IBiddingDataExclusion
>(
"create",
biddingDataExclusions
);
const request = this.buildRequest<
services.BiddingDataExclusionOperation,
services.IMutateBiddingDataExclusionsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "BiddingDataExclusionService.mutateBiddingDataExclusions",
mutation: request,
isServiceCall: true,
};
// Give the onMutationStart hook a chance to cancel or edit the request.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateBiddingDataExclusions(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Let the onMutationEnd hook inspect (and optionally replace) the result.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Surface partial-failure errors embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the error and notify the onMutationError hook before rethrowing.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description update resources of type resources.IBiddingDataExclusion
 * @returns services.MutateBiddingDataExclusionsResponse
 */
update: async (
biddingDataExclusions: (resources.IBiddingDataExclusion | resources.BiddingDataExclusion)[] ,
options?: MutateOptions
): Promise<services.MutateBiddingDataExclusionsResponse > => {
// Wrap each resource in an "update" operation; the static class is passed so
// buildOperations can derive the field mask for partial updates.
const ops = this.buildOperations<
services.BiddingDataExclusionOperation,
resources.IBiddingDataExclusion
>(
"update",
biddingDataExclusions
// @ts-expect-error Static class type here is fine
, resources.BiddingDataExclusion
);
const request = this.buildRequest<
services.BiddingDataExclusionOperation,
services.IMutateBiddingDataExclusionsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "BiddingDataExclusionService.mutateBiddingDataExclusions",
mutation: request,
isServiceCall: true,
};
// Give the onMutationStart hook a chance to cancel or edit the request.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateBiddingDataExclusions(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Let the onMutationEnd hook inspect (and optionally replace) the result.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Surface partial-failure errors embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the error and notify the onMutationError hook before rethrowing.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description remove resources of type string
 * @returns services.MutateBiddingDataExclusionsResponse
 */
remove: async (
biddingDataExclusions: string[] ,
options?: MutateOptions
): Promise<services.MutateBiddingDataExclusionsResponse > => {
// Wrap each resource name in a "remove" operation.
const ops = this.buildOperations<
services.BiddingDataExclusionOperation,
string
>(
"remove",
biddingDataExclusions
);
const request = this.buildRequest<
services.BiddingDataExclusionOperation,
services.IMutateBiddingDataExclusionsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "BiddingDataExclusionService.mutateBiddingDataExclusions",
mutation: request,
isServiceCall: true,
};
// Give the onMutationStart hook a chance to cancel or edit the request.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateBiddingDataExclusions(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Let the onMutationEnd hook inspect (and optionally replace) the result.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Surface partial-failure errors embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the error and notify the onMutationError hook before rethrowing.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/BiddingSeasonalityAdjustmentService
*/
public get biddingSeasonalityAdjustments() {
// Lazily instantiate the BiddingSeasonalityAdjustmentService gRPC client.
const service = this.loadService<services.BiddingSeasonalityAdjustmentService>("BiddingSeasonalityAdjustmentServiceClient")
// Request-level options callers may set on any mutate call for this service.
type MutateOptions = Partial<Pick<services.IMutateBiddingSeasonalityAdjustmentsRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
 * @description Retrieve a resources.BiddingSeasonalityAdjustment in full detail
 * @warning Don't use get in production!
 * @returns resources.BiddingSeasonalityAdjustment
 */
get: async (resourceName: string): Promise<resources.BiddingSeasonalityAdjustment> => {
const request = new services.GetBiddingSeasonalityAdjustmentRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getBiddingSeasonalityAdjustment(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
 * @description create resources of type resources.IBiddingSeasonalityAdjustment
 * @returns services.MutateBiddingSeasonalityAdjustmentsResponse
 */
create: async (
biddingSeasonalityAdjustments: (resources.IBiddingSeasonalityAdjustment | resources.BiddingSeasonalityAdjustment)[] ,
options?: MutateOptions
): Promise<services.MutateBiddingSeasonalityAdjustmentsResponse > => {
// Wrap each resource in a "create" operation.
const ops = this.buildOperations<
services.BiddingSeasonalityAdjustmentOperation,
resources.IBiddingSeasonalityAdjustment
>(
"create",
biddingSeasonalityAdjustments
);
const request = this.buildRequest<
services.BiddingSeasonalityAdjustmentOperation,
services.IMutateBiddingSeasonalityAdjustmentsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "BiddingSeasonalityAdjustmentService.mutateBiddingSeasonalityAdjustments",
mutation: request,
isServiceCall: true,
};
// Give the onMutationStart hook a chance to cancel or edit the request.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateBiddingSeasonalityAdjustments(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Let the onMutationEnd hook inspect (and optionally replace) the result.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Surface partial-failure errors embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the error and notify the onMutationError hook before rethrowing.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description update resources of type resources.IBiddingSeasonalityAdjustment
 * @returns services.MutateBiddingSeasonalityAdjustmentsResponse
 */
update: async (
biddingSeasonalityAdjustments: (resources.IBiddingSeasonalityAdjustment | resources.BiddingSeasonalityAdjustment)[] ,
options?: MutateOptions
): Promise<services.MutateBiddingSeasonalityAdjustmentsResponse > => {
// Wrap each resource in an "update" operation; the static class is passed so
// buildOperations can derive the field mask for partial updates.
const ops = this.buildOperations<
services.BiddingSeasonalityAdjustmentOperation,
resources.IBiddingSeasonalityAdjustment
>(
"update",
biddingSeasonalityAdjustments
// @ts-expect-error Static class type here is fine
, resources.BiddingSeasonalityAdjustment
);
const request = this.buildRequest<
services.BiddingSeasonalityAdjustmentOperation,
services.IMutateBiddingSeasonalityAdjustmentsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "BiddingSeasonalityAdjustmentService.mutateBiddingSeasonalityAdjustments",
mutation: request,
isServiceCall: true,
};
// Give the onMutationStart hook a chance to cancel or edit the request.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateBiddingSeasonalityAdjustments(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Let the onMutationEnd hook inspect (and optionally replace) the result.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Surface partial-failure errors embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the error and notify the onMutationError hook before rethrowing.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description remove resources of type string
 * @returns services.MutateBiddingSeasonalityAdjustmentsResponse
 */
remove: async (
biddingSeasonalityAdjustments: string[] ,
options?: MutateOptions
): Promise<services.MutateBiddingSeasonalityAdjustmentsResponse > => {
// Wrap each resource name in a "remove" operation.
const ops = this.buildOperations<
services.BiddingSeasonalityAdjustmentOperation,
string
>(
"remove",
biddingSeasonalityAdjustments
);
const request = this.buildRequest<
services.BiddingSeasonalityAdjustmentOperation,
services.IMutateBiddingSeasonalityAdjustmentsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "BiddingSeasonalityAdjustmentService.mutateBiddingSeasonalityAdjustments",
mutation: request,
isServiceCall: true,
};
// Give the onMutationStart hook a chance to cancel or edit the request.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateBiddingSeasonalityAdjustments(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Let the onMutationEnd hook inspect (and optionally replace) the result.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Surface partial-failure errors embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the error and notify the onMutationError hook before rethrowing.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/BiddingStrategyService
*/
public get biddingStrategies() {
const service = this.loadService<services.BiddingStrategyService>("BiddingStrategyServiceClient")
type MutateOptions = Partial<Pick<services.IMutateBiddingStrategiesRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
* @description Retrieve a resources.BiddingStrategy in full detail
* @warning Don't use get in production!
* @returns resources.BiddingStrategy
*/
get: async (resourceName: string): Promise<resources.BiddingStrategy> => {
const request = new services.GetBiddingStrategyRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getBiddingStrategy(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.IBiddingStrategy
* @returns services.MutateBiddingStrategiesResponse
*/
create: async (
biddingStrategies: (resources.IBiddingStrategy | resources.BiddingStrategy)[] ,
options?: MutateOptions
): Promise<services.MutateBiddingStrategiesResponse > => {
const ops = this.buildOperations<
services.BiddingStrategyOperation,
resources.IBiddingStrategy
>(
"create",
biddingStrategies
);
const request = this.buildRequest<
services.BiddingStrategyOperation,
services.IMutateBiddingStrategiesRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "BiddingStrategyService.mutateBiddingStrategies",
mutation: request,
isServiceCall: true,
};
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateBiddingStrategies(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description update resources of type resources.IBiddingStrategy
* @returns services.MutateBiddingStrategiesResponse
*/
// Update existing BiddingStrategy resources via
// BiddingStrategyService.mutateBiddingStrategies. Flow: build "update"
// operations -> build request -> onMutationStart hook (may cancel or edit
// request options) -> RPC -> onMutationEnd hook (may override the result)
// -> decoded response.
update: async (
biddingStrategies: (resources.IBiddingStrategy | resources.BiddingStrategy)[] ,
options?: MutateOptions
): Promise<services.MutateBiddingStrategiesResponse > => {
// Static class is forwarded so buildOperations can use it — presumably to
// derive the update field mask; confirm in buildOperations.
const ops = this.buildOperations<
services.BiddingStrategyOperation,
resources.IBiddingStrategy
>(
"update",
biddingStrategies
// @ts-expect-error Static class type here is fine
, resources.BiddingStrategy
);
const request = this.buildRequest<
services.BiddingStrategyOperation,
services.IMutateBiddingStrategiesRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "BiddingStrategyService.mutateBiddingStrategies",
mutation: request,
isServiceCall: true,
};
// Pre-flight hook: the caller may cancel the mutation (short-circuiting with
// its own result) or merge extra options into the outgoing request.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateBiddingStrategies(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-RPC hook: the caller may replace the (partial-failure-decoded)
// response via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode any partial-failure error carried on the response before returning.
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the raw error via getGoogleAdsError, surface it to the error
// hook, then rethrow the normalized form.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateBiddingStrategiesResponse
*/
// Remove BiddingStrategy resources (identified by resource name) via
// BiddingStrategyService.mutateBiddingStrategies. Same hook/RPC/decode flow
// as create/update.
remove: async (
biddingStrategies: string[] ,
options?: MutateOptions
): Promise<services.MutateBiddingStrategiesResponse > => {
const ops = this.buildOperations<
services.BiddingStrategyOperation,
string
>(
"remove",
biddingStrategies
);
const request = this.buildRequest<
services.BiddingStrategyOperation,
services.IMutateBiddingStrategiesRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "BiddingStrategyService.mutateBiddingStrategies",
mutation: request,
isServiceCall: true,
};
// Pre-flight hook: may cancel the mutation or merge extra request options.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateBiddingStrategies(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-RPC hook: may replace the decoded response via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode any partial-failure error carried on the response before returning.
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CampaignAssetService
*/
// Accessor for CampaignAssetService. Lazily loads the gRPC client and returns
// get/create/update/remove operations that all share the same mutation-hook
// pipeline (onMutationStart -> RPC -> onMutationEnd / onMutationError).
public get campaignAssets() {
const service = this.loadService<services.CampaignAssetService>("CampaignAssetServiceClient")
// Request options callers may set; all others are controlled by buildRequest.
type MutateOptions = Partial<Pick<services.IMutateCampaignAssetsRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
* @description Retrieve a resources.CampaignAsset in full detail
* @warning Don't use get in production!
* @returns resources.CampaignAsset
*/
get: async (resourceName: string): Promise<resources.CampaignAsset> => {
const request = new services.GetCampaignAssetRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getCampaignAsset(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalize transport errors before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.ICampaignAsset
* @returns services.MutateCampaignAssetsResponse
*/
create: async (
campaignAssets: (resources.ICampaignAsset | resources.CampaignAsset)[] ,
options?: MutateOptions
): Promise<services.MutateCampaignAssetsResponse > => {
const ops = this.buildOperations<
services.CampaignAssetOperation,
resources.ICampaignAsset
>(
"create",
campaignAssets
);
const request = this.buildRequest<
services.CampaignAssetOperation,
services.IMutateCampaignAssetsRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignAssetService.mutateCampaignAssets",
mutation: request,
isServiceCall: true,
};
// Pre-flight hook: may cancel the mutation or merge extra request options.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignAssets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-RPC hook: may replace the decoded response via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode any partial-failure error carried on the response before returning.
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description update resources of type resources.ICampaignAsset
* @returns services.MutateCampaignAssetsResponse
*/
update: async (
campaignAssets: (resources.ICampaignAsset | resources.CampaignAsset)[] ,
options?: MutateOptions
): Promise<services.MutateCampaignAssetsResponse > => {
// Static class forwarded so buildOperations can use it — presumably for
// the update field mask; confirm in buildOperations.
const ops = this.buildOperations<
services.CampaignAssetOperation,
resources.ICampaignAsset
>(
"update",
campaignAssets
// @ts-expect-error Static class type here is fine
, resources.CampaignAsset
);
const request = this.buildRequest<
services.CampaignAssetOperation,
services.IMutateCampaignAssetsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignAssetService.mutateCampaignAssets",
mutation: request,
isServiceCall: true,
};
// Pre-flight hook: may cancel the mutation or merge extra request options.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignAssets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-RPC hook: may replace the decoded response via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateCampaignAssetsResponse
*/
remove: async (
campaignAssets: string[] ,
options?: MutateOptions
): Promise<services.MutateCampaignAssetsResponse > => {
const ops = this.buildOperations<
services.CampaignAssetOperation,
string
>(
"remove",
campaignAssets
);
const request = this.buildRequest<
services.CampaignAssetOperation,
services.IMutateCampaignAssetsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignAssetService.mutateCampaignAssets",
mutation: request,
isServiceCall: true,
};
// Pre-flight hook: may cancel the mutation or merge extra request options.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignAssets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-RPC hook: may replace the decoded response via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CampaignAssetSetService
*/
// Accessor for CampaignAssetSetService. Only create/remove are generated for
// this resource (no get/update). Same mutation-hook pipeline as the other
// service accessors.
public get campaignAssetSets() {
const service = this.loadService<services.CampaignAssetSetService>("CampaignAssetSetServiceClient")
// Request options callers may set; all others are controlled by buildRequest.
type MutateOptions = Partial<Pick<services.IMutateCampaignAssetSetsRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
* @description create resources of type resources.ICampaignAssetSet
* @returns services.MutateCampaignAssetSetsResponse
*/
create: async (
campaignAssetSets: (resources.ICampaignAssetSet | resources.CampaignAssetSet)[] ,
options?: MutateOptions
): Promise<services.MutateCampaignAssetSetsResponse > => {
const ops = this.buildOperations<
services.CampaignAssetSetOperation,
resources.ICampaignAssetSet
>(
"create",
campaignAssetSets
);
const request = this.buildRequest<
services.CampaignAssetSetOperation,
services.IMutateCampaignAssetSetsRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignAssetSetService.mutateCampaignAssetSets",
mutation: request,
isServiceCall: true,
};
// Pre-flight hook: may cancel the mutation or merge extra request options.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignAssetSets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-RPC hook: may replace the decoded response via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode any partial-failure error carried on the response before returning.
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateCampaignAssetSetsResponse
*/
remove: async (
campaignAssetSets: string[] ,
options?: MutateOptions
): Promise<services.MutateCampaignAssetSetsResponse > => {
const ops = this.buildOperations<
services.CampaignAssetSetOperation,
string
>(
"remove",
campaignAssetSets
);
const request = this.buildRequest<
services.CampaignAssetSetOperation,
services.IMutateCampaignAssetSetsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignAssetSetService.mutateCampaignAssetSets",
mutation: request,
isServiceCall: true,
};
// Pre-flight hook: may cancel the mutation or merge extra request options.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignAssetSets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-RPC hook: may replace the decoded response via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CampaignBidModifierService
*/
// Accessor for CampaignBidModifierService. Lazily loads the gRPC client and
// returns get/create/update/remove operations sharing the mutation-hook
// pipeline (onMutationStart -> RPC -> onMutationEnd / onMutationError).
public get campaignBidModifiers() {
const service = this.loadService<services.CampaignBidModifierService>("CampaignBidModifierServiceClient")
// Request options callers may set; all others are controlled by buildRequest.
type MutateOptions = Partial<Pick<services.IMutateCampaignBidModifiersRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
* @description Retrieve a resources.CampaignBidModifier in full detail
* @warning Don't use get in production!
* @returns resources.CampaignBidModifier
*/
get: async (resourceName: string): Promise<resources.CampaignBidModifier> => {
const request = new services.GetCampaignBidModifierRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getCampaignBidModifier(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalize transport errors before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.ICampaignBidModifier
* @returns services.MutateCampaignBidModifiersResponse
*/
create: async (
campaignBidModifiers: (resources.ICampaignBidModifier | resources.CampaignBidModifier)[] ,
options?: MutateOptions
): Promise<services.MutateCampaignBidModifiersResponse > => {
const ops = this.buildOperations<
services.CampaignBidModifierOperation,
resources.ICampaignBidModifier
>(
"create",
campaignBidModifiers
);
const request = this.buildRequest<
services.CampaignBidModifierOperation,
services.IMutateCampaignBidModifiersRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignBidModifierService.mutateCampaignBidModifiers",
mutation: request,
isServiceCall: true,
};
// Pre-flight hook: may cancel the mutation or merge extra request options.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignBidModifiers(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-RPC hook: may replace the decoded response via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode any partial-failure error carried on the response before returning.
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description update resources of type resources.ICampaignBidModifier
* @returns services.MutateCampaignBidModifiersResponse
*/
update: async (
campaignBidModifiers: (resources.ICampaignBidModifier | resources.CampaignBidModifier)[] ,
options?: MutateOptions
): Promise<services.MutateCampaignBidModifiersResponse > => {
// Static class forwarded so buildOperations can use it — presumably for
// the update field mask; confirm in buildOperations.
const ops = this.buildOperations<
services.CampaignBidModifierOperation,
resources.ICampaignBidModifier
>(
"update",
campaignBidModifiers
// @ts-expect-error Static class type here is fine
, resources.CampaignBidModifier
);
const request = this.buildRequest<
services.CampaignBidModifierOperation,
services.IMutateCampaignBidModifiersRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignBidModifierService.mutateCampaignBidModifiers",
mutation: request,
isServiceCall: true,
};
// Pre-flight hook: may cancel the mutation or merge extra request options.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignBidModifiers(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-RPC hook: may replace the decoded response via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateCampaignBidModifiersResponse
*/
remove: async (
campaignBidModifiers: string[] ,
options?: MutateOptions
): Promise<services.MutateCampaignBidModifiersResponse > => {
const ops = this.buildOperations<
services.CampaignBidModifierOperation,
string
>(
"remove",
campaignBidModifiers
);
const request = this.buildRequest<
services.CampaignBidModifierOperation,
services.IMutateCampaignBidModifiersRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignBidModifierService.mutateCampaignBidModifiers",
mutation: request,
isServiceCall: true,
};
// Pre-flight hook: may cancel the mutation or merge extra request options.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignBidModifiers(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-RPC hook: may replace the decoded response via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CampaignBudgetService
*/
// Accessor for CampaignBudgetService. Lazily loads the gRPC client and returns
// get/create/update/remove operations sharing the mutation-hook pipeline
// (onMutationStart -> RPC -> onMutationEnd / onMutationError).
public get campaignBudgets() {
const service = this.loadService<services.CampaignBudgetService>("CampaignBudgetServiceClient")
// Request options callers may set; all others are controlled by buildRequest.
type MutateOptions = Partial<Pick<services.IMutateCampaignBudgetsRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
* @description Retrieve a resources.CampaignBudget in full detail
* @warning Don't use get in production!
* @returns resources.CampaignBudget
*/
get: async (resourceName: string): Promise<resources.CampaignBudget> => {
const request = new services.GetCampaignBudgetRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getCampaignBudget(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalize transport errors before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.ICampaignBudget
* @returns services.MutateCampaignBudgetsResponse
*/
create: async (
campaignBudgets: (resources.ICampaignBudget | resources.CampaignBudget)[] ,
options?: MutateOptions
): Promise<services.MutateCampaignBudgetsResponse > => {
const ops = this.buildOperations<
services.CampaignBudgetOperation,
resources.ICampaignBudget
>(
"create",
campaignBudgets
);
const request = this.buildRequest<
services.CampaignBudgetOperation,
services.IMutateCampaignBudgetsRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignBudgetService.mutateCampaignBudgets",
mutation: request,
isServiceCall: true,
};
// Pre-flight hook: may cancel the mutation or merge extra request options.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignBudgets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-RPC hook: may replace the decoded response via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode any partial-failure error carried on the response before returning.
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description update resources of type resources.ICampaignBudget
* @returns services.MutateCampaignBudgetsResponse
*/
update: async (
campaignBudgets: (resources.ICampaignBudget | resources.CampaignBudget)[] ,
options?: MutateOptions
): Promise<services.MutateCampaignBudgetsResponse > => {
// Static class forwarded so buildOperations can use it — presumably for
// the update field mask; confirm in buildOperations.
const ops = this.buildOperations<
services.CampaignBudgetOperation,
resources.ICampaignBudget
>(
"update",
campaignBudgets
// @ts-expect-error Static class type here is fine
, resources.CampaignBudget
);
const request = this.buildRequest<
services.CampaignBudgetOperation,
services.IMutateCampaignBudgetsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignBudgetService.mutateCampaignBudgets",
mutation: request,
isServiceCall: true,
};
// Pre-flight hook: may cancel the mutation or merge extra request options.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignBudgets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-RPC hook: may replace the decoded response via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateCampaignBudgetsResponse
*/
remove: async (
campaignBudgets: string[] ,
options?: MutateOptions
): Promise<services.MutateCampaignBudgetsResponse > => {
const ops = this.buildOperations<
services.CampaignBudgetOperation,
string
>(
"remove",
campaignBudgets
);
const request = this.buildRequest<
services.CampaignBudgetOperation,
services.IMutateCampaignBudgetsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignBudgetService.mutateCampaignBudgets",
mutation: request,
isServiceCall: true,
};
// Pre-flight hook: may cancel the mutation or merge extra request options.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignBudgets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-RPC hook: may replace the decoded response via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CampaignConversionGoalService
*/
// Accessor for CampaignConversionGoalService. Only update is generated for
// this resource. Note: unlike the other mutate methods, MutateOptions exposes
// only "validate_only" (no partial_failure), so the raw RPC response is
// returned without going through decodePartialFailureError.
public get campaignConversionGoals() {
const service = this.loadService<services.CampaignConversionGoalService>("CampaignConversionGoalServiceClient")
type MutateOptions = Partial<Pick<services.IMutateCampaignConversionGoalsRequest, "validate_only">>
return {
/**
* @description update resources of type resources.ICampaignConversionGoal
* @returns services.MutateCampaignConversionGoalsResponse
*/
update: async (
campaignConversionGoals: (resources.ICampaignConversionGoal | resources.CampaignConversionGoal)[] ,
options?: MutateOptions
): Promise<services.MutateCampaignConversionGoalsResponse > => {
// Static class forwarded so buildOperations can use it — presumably for
// the update field mask; confirm in buildOperations.
const ops = this.buildOperations<
services.CampaignConversionGoalOperation,
resources.ICampaignConversionGoal
>(
"update",
campaignConversionGoals
// @ts-expect-error Static class type here is fine
, resources.CampaignConversionGoal
);
const request = this.buildRequest<
services.CampaignConversionGoalOperation,
services.IMutateCampaignConversionGoalsRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignConversionGoalService.mutateCampaignConversionGoals",
mutation: request,
isServiceCall: true,
};
// Pre-flight hook: may cancel the mutation or merge extra request options.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignConversionGoals(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-RPC hook: receives the raw response (no partial-failure decoding
// here) and may replace it via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response,
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return response;
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CampaignCriterionService
*/
public get campaignCriteria() {
// Lazily load the gRPC client for CampaignCriterionService.
const service = this.loadService<services.CampaignCriterionService>("CampaignCriterionServiceClient")
// Request fields callers may set on the mutate RPC; "partial_failure" is
// supported, so mutate responses below go through decodePartialFailureError.
type MutateOptions = Partial<Pick<services.IMutateCampaignCriteriaRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
* @description Retrieve a resources.CampaignCriterion in full detail
* @warning Don't use get in production!
* @returns resources.CampaignCriterion
*/
get: async (resourceName: string): Promise<resources.CampaignCriterion> => {
const request = new services.GetCampaignCriterionRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getCampaignCriterion(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.ICampaignCriterion
* @returns services.MutateCampaignCriteriaResponse
*/
create: async (
campaignCriteria: (resources.ICampaignCriterion | resources.CampaignCriterion)[] ,
options?: MutateOptions
): Promise<services.MutateCampaignCriteriaResponse > => {
// Wrap each resource in a "create" operation for the mutate request.
const ops = this.buildOperations<
services.CampaignCriterionOperation,
resources.ICampaignCriterion
>(
"create",
campaignCriteria
);
const request = this.buildRequest<
services.CampaignCriterionOperation,
services.IMutateCampaignCriteriaRequest,
MutateOptions
>(ops, options);
// Shared metadata handed to every mutation lifecycle hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignCriterionService.mutateCampaignCriteria",
mutation: request,
isServiceCall: true,
};
// onMutationStart may edit request options in place or cancel the call.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignCriteria(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd sees the partial-failure-decoded response and may
// substitute its own resolution.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode any partial_failure error embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description update resources of type resources.ICampaignCriterion
* @returns services.MutateCampaignCriteriaResponse
*/
update: async (
campaignCriteria: (resources.ICampaignCriterion | resources.CampaignCriterion)[] ,
options?: MutateOptions
): Promise<services.MutateCampaignCriteriaResponse > => {
// Wrap each resource in an "update" operation (resource class is passed
// so an update_mask can be derived).
const ops = this.buildOperations<
services.CampaignCriterionOperation,
resources.ICampaignCriterion
>(
"update",
campaignCriteria
// @ts-expect-error Static class type here is fine
, resources.CampaignCriterion
);
const request = this.buildRequest<
services.CampaignCriterionOperation,
services.IMutateCampaignCriteriaRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignCriterionService.mutateCampaignCriteria",
mutation: request,
isServiceCall: true,
};
// onMutationStart may edit request options in place or cancel the call.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignCriteria(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode any partial_failure error embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateCampaignCriteriaResponse
*/
remove: async (
campaignCriteria: string[] ,
options?: MutateOptions
): Promise<services.MutateCampaignCriteriaResponse > => {
// "remove" operations take bare resource names rather than resources.
const ops = this.buildOperations<
services.CampaignCriterionOperation,
string
>(
"remove",
campaignCriteria
);
const request = this.buildRequest<
services.CampaignCriterionOperation,
services.IMutateCampaignCriteriaRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignCriterionService.mutateCampaignCriteria",
mutation: request,
isServiceCall: true,
};
// onMutationStart may edit request options in place or cancel the call.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignCriteria(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode any partial_failure error embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CampaignCustomizerService
*/
public get campaignCustomizers() {
// Lazily load the gRPC client for CampaignCustomizerService.
const service = this.loadService<services.CampaignCustomizerService>("CampaignCustomizerServiceClient")
// Request fields callers may set on the mutate RPC; "partial_failure" is
// supported, so mutate responses below go through decodePartialFailureError.
type MutateOptions = Partial<Pick<services.IMutateCampaignCustomizersRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
* @description create resources of type resources.ICampaignCustomizer
* @returns services.MutateCampaignCustomizersResponse
*/
create: async (
campaignCustomizers: (resources.ICampaignCustomizer | resources.CampaignCustomizer)[] ,
options?: MutateOptions
): Promise<services.MutateCampaignCustomizersResponse > => {
// Wrap each resource in a "create" operation for the mutate request.
const ops = this.buildOperations<
services.CampaignCustomizerOperation,
resources.ICampaignCustomizer
>(
"create",
campaignCustomizers
);
const request = this.buildRequest<
services.CampaignCustomizerOperation,
services.IMutateCampaignCustomizersRequest,
MutateOptions
>(ops, options);
// Shared metadata handed to every mutation lifecycle hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignCustomizerService.mutateCampaignCustomizers",
mutation: request,
isServiceCall: true,
};
// onMutationStart may edit request options in place or cancel the call.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignCustomizers(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd sees the partial-failure-decoded response and may
// substitute its own resolution.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode any partial_failure error embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateCampaignCustomizersResponse
*/
remove: async (
campaignCustomizers: string[] ,
options?: MutateOptions
): Promise<services.MutateCampaignCustomizersResponse > => {
// "remove" operations take bare resource names rather than resources.
const ops = this.buildOperations<
services.CampaignCustomizerOperation,
string
>(
"remove",
campaignCustomizers
);
const request = this.buildRequest<
services.CampaignCustomizerOperation,
services.IMutateCampaignCustomizersRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignCustomizerService.mutateCampaignCustomizers",
mutation: request,
isServiceCall: true,
};
// onMutationStart may edit request options in place or cancel the call.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignCustomizers(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode any partial_failure error embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CampaignDraftService
*/
public get campaignDrafts() {
// Lazily load the gRPC client for CampaignDraftService.
const service = this.loadService<services.CampaignDraftService>("CampaignDraftServiceClient")
// Request fields callers may set on the mutate RPC; "partial_failure" is
// supported, so mutate responses below go through decodePartialFailureError.
type MutateOptions = Partial<Pick<services.IMutateCampaignDraftsRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
* @description Retrieve a resources.CampaignDraft in full detail
* @warning Don't use get in production!
* @returns resources.CampaignDraft
*/
get: async (resourceName: string): Promise<resources.CampaignDraft> => {
const request = new services.GetCampaignDraftRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getCampaignDraft(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.ICampaignDraft
* @returns services.MutateCampaignDraftsResponse
*/
create: async (
campaignDrafts: (resources.ICampaignDraft | resources.CampaignDraft)[] ,
options?: MutateOptions
): Promise<services.MutateCampaignDraftsResponse > => {
// Wrap each resource in a "create" operation for the mutate request.
const ops = this.buildOperations<
services.CampaignDraftOperation,
resources.ICampaignDraft
>(
"create",
campaignDrafts
);
const request = this.buildRequest<
services.CampaignDraftOperation,
services.IMutateCampaignDraftsRequest,
MutateOptions
>(ops, options);
// Shared metadata handed to every mutation lifecycle hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignDraftService.mutateCampaignDrafts",
mutation: request,
isServiceCall: true,
};
// onMutationStart may edit request options in place or cancel the call.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignDrafts(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd sees the partial-failure-decoded response and may
// substitute its own resolution.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode any partial_failure error embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description update resources of type resources.ICampaignDraft
* @returns services.MutateCampaignDraftsResponse
*/
update: async (
campaignDrafts: (resources.ICampaignDraft | resources.CampaignDraft)[] ,
options?: MutateOptions
): Promise<services.MutateCampaignDraftsResponse > => {
// Wrap each resource in an "update" operation (resource class is passed
// so an update_mask can be derived).
const ops = this.buildOperations<
services.CampaignDraftOperation,
resources.ICampaignDraft
>(
"update",
campaignDrafts
// @ts-expect-error Static class type here is fine
, resources.CampaignDraft
);
const request = this.buildRequest<
services.CampaignDraftOperation,
services.IMutateCampaignDraftsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignDraftService.mutateCampaignDrafts",
mutation: request,
isServiceCall: true,
};
// onMutationStart may edit request options in place or cancel the call.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignDrafts(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode any partial_failure error embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateCampaignDraftsResponse
*/
remove: async (
campaignDrafts: string[] ,
options?: MutateOptions
): Promise<services.MutateCampaignDraftsResponse > => {
// "remove" operations take bare resource names rather than resources.
const ops = this.buildOperations<
services.CampaignDraftOperation,
string
>(
"remove",
campaignDrafts
);
const request = this.buildRequest<
services.CampaignDraftOperation,
services.IMutateCampaignDraftsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignDraftService.mutateCampaignDrafts",
mutation: request,
isServiceCall: true,
};
// onMutationStart may edit request options in place or cancel the call.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignDrafts(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode any partial_failure error embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CampaignDraftService#promotecampaigndraft
*/
// Starts a long-running promote operation; returns the LRO handle, not the
// final promotion result. Mutation hooks are not invoked for this RPC.
promoteCampaignDraft: async (request: services.PromoteCampaignDraftRequest): Promise<longrunning.Operation> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.promoteCampaignDraft(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CampaignDraftService#listcampaigndraftasyncerrors
*/
listCampaignDraftAsyncErrors: async (request: services.ListCampaignDraftAsyncErrorsRequest): Promise<services.ListCampaignDraftAsyncErrorsResponse> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.listCampaignDraftAsyncErrors(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CampaignExperimentService
*/
public get campaignExperiments() {
// Lazily load the gRPC client for CampaignExperimentService.
const service = this.loadService<services.CampaignExperimentService>("CampaignExperimentServiceClient")
// Request fields callers may set on the mutate RPC; "partial_failure" is
// supported, so mutate responses below go through decodePartialFailureError.
type MutateOptions = Partial<Pick<services.IMutateCampaignExperimentsRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
* @description Retrieve a resources.CampaignExperiment in full detail
* @warning Don't use get in production!
* @returns resources.CampaignExperiment
*/
get: async (resourceName: string): Promise<resources.CampaignExperiment> => {
const request = new services.GetCampaignExperimentRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getCampaignExperiment(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CampaignExperimentService#createcampaignexperiment
*/
// Starts a long-running create operation; returns the LRO handle.
// Mutation hooks are not invoked for this RPC.
createCampaignExperiment: async (request: services.CreateCampaignExperimentRequest): Promise<longrunning.Operation> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.createCampaignExperiment(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description update resources of type resources.ICampaignExperiment
* @returns services.MutateCampaignExperimentsResponse
*/
update: async (
campaignExperiments: (resources.ICampaignExperiment | resources.CampaignExperiment)[] ,
options?: MutateOptions
): Promise<services.MutateCampaignExperimentsResponse > => {
// Wrap each resource in an "update" operation (resource class is passed
// so an update_mask can be derived).
const ops = this.buildOperations<
services.CampaignExperimentOperation,
resources.ICampaignExperiment
>(
"update",
campaignExperiments
// @ts-expect-error Static class type here is fine
, resources.CampaignExperiment
);
const request = this.buildRequest<
services.CampaignExperimentOperation,
services.IMutateCampaignExperimentsRequest,
MutateOptions
>(ops, options);
// Shared metadata handed to every mutation lifecycle hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignExperimentService.mutateCampaignExperiments",
mutation: request,
isServiceCall: true,
};
// onMutationStart may edit request options in place or cancel the call.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignExperiments(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd sees the partial-failure-decoded response and may
// substitute its own resolution.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode any partial_failure error embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateCampaignExperimentsResponse
*/
remove: async (
campaignExperiments: string[] ,
options?: MutateOptions
): Promise<services.MutateCampaignExperimentsResponse > => {
// "remove" operations take bare resource names rather than resources.
const ops = this.buildOperations<
services.CampaignExperimentOperation,
string
>(
"remove",
campaignExperiments
);
const request = this.buildRequest<
services.CampaignExperimentOperation,
services.IMutateCampaignExperimentsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignExperimentService.mutateCampaignExperiments",
mutation: request,
isServiceCall: true,
};
// onMutationStart may edit request options in place or cancel the call.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignExperiments(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode any partial_failure error embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CampaignExperimentService#graduatecampaignexperiment
*/
graduateCampaignExperiment: async (request: services.GraduateCampaignExperimentRequest): Promise<services.GraduateCampaignExperimentResponse> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.graduateCampaignExperiment(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CampaignExperimentService#promotecampaignexperiment
*/
// Starts a long-running promote operation; returns the LRO handle.
promoteCampaignExperiment: async (request: services.PromoteCampaignExperimentRequest): Promise<longrunning.Operation> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.promoteCampaignExperiment(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CampaignExperimentService#endcampaignexperiment
*/
endCampaignExperiment: async (request: services.EndCampaignExperimentRequest): Promise<protobuf.Empty> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.endCampaignExperiment(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CampaignExperimentService#listcampaignexperimentasyncerrors
*/
listCampaignExperimentAsyncErrors: async (request: services.ListCampaignExperimentAsyncErrorsRequest): Promise<services.ListCampaignExperimentAsyncErrorsResponse> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.listCampaignExperimentAsyncErrors(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CampaignExtensionSettingService
*/
public get campaignExtensionSettings() {
const service = this.loadService<services.CampaignExtensionSettingService>("CampaignExtensionSettingServiceClient")
type MutateOptions = Partial<Pick<services.IMutateCampaignExtensionSettingsRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
* @description Retrieve a resources.CampaignExtensionSetting in full detail
* @warning Don't use get in production!
* @returns resources.CampaignExtensionSetting
*/
get: async (resourceName: string): Promise<resources.CampaignExtensionSetting> => {
const request = new services.GetCampaignExtensionSettingRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getCampaignExtensionSetting(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.ICampaignExtensionSetting
* @returns services.MutateCampaignExtensionSettingsResponse
*/
create: async (
campaignExtensionSettings: (resources.ICampaignExtensionSetting | resources.CampaignExtensionSetting)[] ,
options?: MutateOptions
): Promise<services.MutateCampaignExtensionSettingsResponse > => {
const ops = this.buildOperations<
services.CampaignExtensionSettingOperation,
resources.ICampaignExtensionSetting
>(
"create",
campaignExtensionSettings
);
const request = this.buildRequest<
services.CampaignExtensionSettingOperation,
services.IMutateCampaignExtensionSettingsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignExtensionSettingService.mutateCampaignExtensionSettings",
mutation: request,
isServiceCall: true,
};
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignExtensionSettings(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description update resources of type resources.ICampaignExtensionSetting
* @returns services.MutateCampaignExtensionSettingsResponse
*/
update: async (
campaignExtensionSettings: (resources.ICampaignExtensionSetting | resources.CampaignExtensionSetting)[] ,
options?: MutateOptions
): Promise<services.MutateCampaignExtensionSettingsResponse > => {
const ops = this.buildOperations<
services.CampaignExtensionSettingOperation,
resources.ICampaignExtensionSetting
>(
"update",
campaignExtensionSettings
// @ts-expect-error Static class type here is fine
, resources.CampaignExtensionSetting
);
const request = this.buildRequest<
services.CampaignExtensionSettingOperation,
services.IMutateCampaignExtensionSettingsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignExtensionSettingService.mutateCampaignExtensionSettings",
mutation: request,
isServiceCall: true,
};
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignExtensionSettings(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateCampaignExtensionSettingsResponse
*/
remove: async (
campaignExtensionSettings: string[] ,
options?: MutateOptions
): Promise<services.MutateCampaignExtensionSettingsResponse > => {
const ops = this.buildOperations<
services.CampaignExtensionSettingOperation,
string
>(
"remove",
campaignExtensionSettings
);
const request = this.buildRequest<
services.CampaignExtensionSettingOperation,
services.IMutateCampaignExtensionSettingsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignExtensionSettingService.mutateCampaignExtensionSettings",
mutation: request,
isServiceCall: true,
};
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignExtensionSettings(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CampaignFeedService
*/
public get campaignFeeds() {
// Load (lazily, per property access) the gRPC client for this service.
const service = this.loadService<services.CampaignFeedService>("CampaignFeedServiceClient")
// Request-level flags a caller may forward to the underlying mutate RPC.
type MutateOptions = Partial<Pick<services.IMutateCampaignFeedsRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
* @description Retrieve a resources.CampaignFeed in full detail
* @warning Don't use get in production!
* @returns resources.CampaignFeed
*/
get: async (resourceName: string): Promise<resources.CampaignFeed> => {
// Direct lookup by fully-qualified resource name.
const request = new services.GetCampaignFeedRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getCampaignFeed(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Convert transport-level errors into GoogleAdsError before surfacing.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.ICampaignFeed
* @returns services.MutateCampaignFeedsResponse
*/
create: async (
campaignFeeds: (resources.ICampaignFeed | resources.CampaignFeed)[] ,
options?: MutateOptions
): Promise<services.MutateCampaignFeedsResponse > => {
// Wrap each input resource in a "create" mutate operation.
const ops = this.buildOperations<
services.CampaignFeedOperation,
resources.ICampaignFeed
>(
"create",
campaignFeeds
);
// Assemble the mutate request from the operations plus caller-supplied options.
const request = this.buildRequest<
services.CampaignFeedOperation,
services.IMutateCampaignFeedsRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation lifecycle hook below.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignFeedService.mutateCampaignFeeds",
mutation: request,
isServiceCall: true,
};
// onMutationStart may cancel the call (supplying its own response) or
// edit request options in place before the RPC is issued.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignFeeds(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd may replace the decoded response via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Surface any partial-failure errors embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Normalise to GoogleAdsError, notify the error hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description update resources of type resources.ICampaignFeed
* @returns services.MutateCampaignFeedsResponse
*/
update: async (
campaignFeeds: (resources.ICampaignFeed | resources.CampaignFeed)[] ,
options?: MutateOptions
): Promise<services.MutateCampaignFeedsResponse > => {
// Wrap each input resource in an "update" operation; the resource class is
// passed so an update_mask can be derived from the provided fields.
const ops = this.buildOperations<
services.CampaignFeedOperation,
resources.ICampaignFeed
>(
"update",
campaignFeeds
// @ts-expect-error Static class type here is fine
, resources.CampaignFeed
);
// Assemble the mutate request from the operations plus caller-supplied options.
const request = this.buildRequest<
services.CampaignFeedOperation,
services.IMutateCampaignFeedsRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation lifecycle hook below.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignFeedService.mutateCampaignFeeds",
mutation: request,
isServiceCall: true,
};
// onMutationStart may cancel the call or edit request options in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignFeeds(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd may replace the decoded response via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Surface any partial-failure errors embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Normalise to GoogleAdsError, notify the error hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateCampaignFeedsResponse
*/
remove: async (
campaignFeeds: string[] ,
options?: MutateOptions
): Promise<services.MutateCampaignFeedsResponse > => {
// Wrap each resource name in a "remove" mutate operation.
const ops = this.buildOperations<
services.CampaignFeedOperation,
string
>(
"remove",
campaignFeeds
);
// Assemble the mutate request from the operations plus caller-supplied options.
const request = this.buildRequest<
services.CampaignFeedOperation,
services.IMutateCampaignFeedsRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation lifecycle hook below.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignFeedService.mutateCampaignFeeds",
mutation: request,
isServiceCall: true,
};
// onMutationStart may cancel the call or edit request options in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignFeeds(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd may replace the decoded response via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Surface any partial-failure errors embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Normalise to GoogleAdsError, notify the error hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CampaignLabelService
*/
public get campaignLabels() {
// Load (lazily, per property access) the gRPC client for this service.
const service = this.loadService<services.CampaignLabelService>("CampaignLabelServiceClient")
// Request-level flags a caller may forward to the underlying mutate RPC.
// NOTE: labels support no response_content_type and no update operation.
type MutateOptions = Partial<Pick<services.IMutateCampaignLabelsRequest, "partial_failure"|"validate_only">>
return {
/**
* @description Retrieve a resources.CampaignLabel in full detail
* @warning Don't use get in production!
* @returns resources.CampaignLabel
*/
get: async (resourceName: string): Promise<resources.CampaignLabel> => {
// Direct lookup by fully-qualified resource name.
const request = new services.GetCampaignLabelRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getCampaignLabel(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Convert transport-level errors into GoogleAdsError before surfacing.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.ICampaignLabel
* @returns services.MutateCampaignLabelsResponse
*/
create: async (
campaignLabels: (resources.ICampaignLabel | resources.CampaignLabel)[] ,
options?: MutateOptions
): Promise<services.MutateCampaignLabelsResponse > => {
// Wrap each input resource in a "create" mutate operation.
const ops = this.buildOperations<
services.CampaignLabelOperation,
resources.ICampaignLabel
>(
"create",
campaignLabels
);
// Assemble the mutate request from the operations plus caller-supplied options.
const request = this.buildRequest<
services.CampaignLabelOperation,
services.IMutateCampaignLabelsRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation lifecycle hook below.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignLabelService.mutateCampaignLabels",
mutation: request,
isServiceCall: true,
};
// onMutationStart may cancel the call or edit request options in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignLabels(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd may replace the decoded response via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Surface any partial-failure errors embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Normalise to GoogleAdsError, notify the error hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateCampaignLabelsResponse
*/
remove: async (
campaignLabels: string[] ,
options?: MutateOptions
): Promise<services.MutateCampaignLabelsResponse > => {
// Wrap each resource name in a "remove" mutate operation.
const ops = this.buildOperations<
services.CampaignLabelOperation,
string
>(
"remove",
campaignLabels
);
// Assemble the mutate request from the operations plus caller-supplied options.
const request = this.buildRequest<
services.CampaignLabelOperation,
services.IMutateCampaignLabelsRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation lifecycle hook below.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignLabelService.mutateCampaignLabels",
mutation: request,
isServiceCall: true,
};
// onMutationStart may cancel the call or edit request options in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignLabels(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd may replace the decoded response via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Surface any partial-failure errors embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Normalise to GoogleAdsError, notify the error hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CampaignService
*/
public get campaigns() {
// Load (lazily, per property access) the gRPC client for this service.
const service = this.loadService<services.CampaignService>("CampaignServiceClient")
// Request-level flags a caller may forward to the underlying mutate RPC.
type MutateOptions = Partial<Pick<services.IMutateCampaignsRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
* @description Retrieve a resources.Campaign in full detail
* @warning Don't use get in production!
* @returns resources.Campaign
*/
get: async (resourceName: string): Promise<resources.Campaign> => {
// Direct lookup by fully-qualified resource name.
const request = new services.GetCampaignRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getCampaign(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Convert transport-level errors into GoogleAdsError before surfacing.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.ICampaign
* @returns services.MutateCampaignsResponse
*/
create: async (
campaigns: (resources.ICampaign | resources.Campaign)[] ,
options?: MutateOptions
): Promise<services.MutateCampaignsResponse > => {
// Wrap each input resource in a "create" mutate operation.
const ops = this.buildOperations<
services.CampaignOperation,
resources.ICampaign
>(
"create",
campaigns
);
// Assemble the mutate request from the operations plus caller-supplied options.
const request = this.buildRequest<
services.CampaignOperation,
services.IMutateCampaignsRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation lifecycle hook below.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignService.mutateCampaigns",
mutation: request,
isServiceCall: true,
};
// onMutationStart may cancel the call or edit request options in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaigns(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd may replace the decoded response via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Surface any partial-failure errors embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Normalise to GoogleAdsError, notify the error hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description update resources of type resources.ICampaign
* @returns services.MutateCampaignsResponse
*/
update: async (
campaigns: (resources.ICampaign | resources.Campaign)[] ,
options?: MutateOptions
): Promise<services.MutateCampaignsResponse > => {
// Wrap each input resource in an "update" operation; the resource class is
// passed so an update_mask can be derived from the provided fields.
const ops = this.buildOperations<
services.CampaignOperation,
resources.ICampaign
>(
"update",
campaigns
// @ts-expect-error Static class type here is fine
, resources.Campaign
);
// Assemble the mutate request from the operations plus caller-supplied options.
const request = this.buildRequest<
services.CampaignOperation,
services.IMutateCampaignsRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation lifecycle hook below.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignService.mutateCampaigns",
mutation: request,
isServiceCall: true,
};
// onMutationStart may cancel the call or edit request options in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaigns(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd may replace the decoded response via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Surface any partial-failure errors embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Normalise to GoogleAdsError, notify the error hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateCampaignsResponse
*/
remove: async (
campaigns: string[] ,
options?: MutateOptions
): Promise<services.MutateCampaignsResponse > => {
// Wrap each resource name in a "remove" mutate operation.
const ops = this.buildOperations<
services.CampaignOperation,
string
>(
"remove",
campaigns
);
// Assemble the mutate request from the operations plus caller-supplied options.
const request = this.buildRequest<
services.CampaignOperation,
services.IMutateCampaignsRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation lifecycle hook below.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignService.mutateCampaigns",
mutation: request,
isServiceCall: true,
};
// onMutationStart may cancel the call or edit request options in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaigns(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd may replace the decoded response via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Surface any partial-failure errors embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Normalise to GoogleAdsError, notify the error hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CampaignSharedSetService
*/
public get campaignSharedSets() {
// Load (lazily, per property access) the gRPC client for this service.
const service = this.loadService<services.CampaignSharedSetService>("CampaignSharedSetServiceClient")
// Request-level flags a caller may forward to the underlying mutate RPC.
// NOTE: campaign shared sets expose no update operation.
type MutateOptions = Partial<Pick<services.IMutateCampaignSharedSetsRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
* @description Retrieve a resources.CampaignSharedSet in full detail
* @warning Don't use get in production!
* @returns resources.CampaignSharedSet
*/
get: async (resourceName: string): Promise<resources.CampaignSharedSet> => {
// Direct lookup by fully-qualified resource name.
const request = new services.GetCampaignSharedSetRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getCampaignSharedSet(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Convert transport-level errors into GoogleAdsError before surfacing.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.ICampaignSharedSet
* @returns services.MutateCampaignSharedSetsResponse
*/
create: async (
campaignSharedSets: (resources.ICampaignSharedSet | resources.CampaignSharedSet)[] ,
options?: MutateOptions
): Promise<services.MutateCampaignSharedSetsResponse > => {
// Wrap each input resource in a "create" mutate operation.
const ops = this.buildOperations<
services.CampaignSharedSetOperation,
resources.ICampaignSharedSet
>(
"create",
campaignSharedSets
);
// Assemble the mutate request from the operations plus caller-supplied options.
const request = this.buildRequest<
services.CampaignSharedSetOperation,
services.IMutateCampaignSharedSetsRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation lifecycle hook below.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignSharedSetService.mutateCampaignSharedSets",
mutation: request,
isServiceCall: true,
};
// onMutationStart may cancel the call or edit request options in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignSharedSets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd may replace the decoded response via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Surface any partial-failure errors embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Normalise to GoogleAdsError, notify the error hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateCampaignSharedSetsResponse
*/
remove: async (
campaignSharedSets: string[] ,
options?: MutateOptions
): Promise<services.MutateCampaignSharedSetsResponse > => {
// Wrap each resource name in a "remove" mutate operation.
const ops = this.buildOperations<
services.CampaignSharedSetOperation,
string
>(
"remove",
campaignSharedSets
);
// Assemble the mutate request from the operations plus caller-supplied options.
const request = this.buildRequest<
services.CampaignSharedSetOperation,
services.IMutateCampaignSharedSetsRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation lifecycle hook below.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CampaignSharedSetService.mutateCampaignSharedSets",
mutation: request,
isServiceCall: true,
};
// onMutationStart may cancel the call or edit request options in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCampaignSharedSets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd may replace the decoded response via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Surface any partial-failure errors embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Normalise to GoogleAdsError, notify the error hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/ConversionActionService
*/
public get conversionActions() {
const service = this.loadService<services.ConversionActionService>("ConversionActionServiceClient")
type MutateOptions = Partial<Pick<services.IMutateConversionActionsRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
* @description Retrieve a resources.ConversionAction in full detail
* @warning Don't use get in production!
* @returns resources.ConversionAction
*/
get: async (resourceName: string): Promise<resources.ConversionAction> => {
const request = new services.GetConversionActionRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getConversionAction(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.IConversionAction
* @returns services.MutateConversionActionsResponse
*/
create: async (
conversionActions: (resources.IConversionAction | resources.ConversionAction)[] ,
options?: MutateOptions
): Promise<services.MutateConversionActionsResponse > => {
const ops = this.buildOperations<
services.ConversionActionOperation,
resources.IConversionAction
>(
"create",
conversionActions
);
const request = this.buildRequest<
services.ConversionActionOperation,
services.IMutateConversionActionsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "ConversionActionService.mutateConversionActions",
mutation: request,
isServiceCall: true,
};
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateConversionActions(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description update resources of type resources.IConversionAction
* @returns services.MutateConversionActionsResponse
*/
update: async (
conversionActions: (resources.IConversionAction | resources.ConversionAction)[] ,
options?: MutateOptions
): Promise<services.MutateConversionActionsResponse > => {
const ops = this.buildOperations<
services.ConversionActionOperation,
resources.IConversionAction
>(
"update",
conversionActions
// @ts-expect-error Static class type here is fine
, resources.ConversionAction
);
const request = this.buildRequest<
services.ConversionActionOperation,
services.IMutateConversionActionsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "ConversionActionService.mutateConversionActions",
mutation: request,
isServiceCall: true,
};
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateConversionActions(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateConversionActionsResponse
*/
remove: async (
conversionActions: string[] ,
options?: MutateOptions
): Promise<services.MutateConversionActionsResponse > => {
const ops = this.buildOperations<
services.ConversionActionOperation,
string
>(
"remove",
conversionActions
);
const request = this.buildRequest<
services.ConversionActionOperation,
services.IMutateConversionActionsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "ConversionActionService.mutateConversionActions",
mutation: request,
isServiceCall: true,
};
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateConversionActions(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
 * Accessor for the ConversionCustomVariableService.
 *
 * Lazily loads the gRPC service client and returns a generated API surface
 * for resources.ConversionCustomVariable. Note: this service exposes only
 * get/create/update — there is no remove method in the returned object.
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/ConversionCustomVariableService
 */
public get conversionCustomVariables() {
const service = this.loadService<services.ConversionCustomVariableService>("ConversionCustomVariableServiceClient")
// Request-level options callers may set; copied verbatim onto the mutate request.
type MutateOptions = Partial<Pick<services.IMutateConversionCustomVariablesRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
 * @description Retrieve a resources.ConversionCustomVariable in full detail
 * @warning Don't use get in production!
 * @returns resources.ConversionCustomVariable
 */
get: async (resourceName: string): Promise<resources.ConversionCustomVariable> => {
const request = new services.GetConversionCustomVariableRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getConversionCustomVariable(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise transport/gRPC errors into a GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
 * @description create resources of type resources.IConversionCustomVariable
 * @returns services.MutateConversionCustomVariablesResponse
 */
create: async (
conversionCustomVariables: (resources.IConversionCustomVariable | resources.ConversionCustomVariable)[] ,
options?: MutateOptions
): Promise<services.MutateConversionCustomVariablesResponse > => {
// Wrap each resource in a "create" operation.
const ops = this.buildOperations<
services.ConversionCustomVariableOperation,
resources.IConversionCustomVariable
>(
"create",
conversionCustomVariables
);
const request = this.buildRequest<
services.ConversionCustomVariableOperation,
services.IMutateConversionCustomVariablesRequest,
MutateOptions
>(ops, options);
// Shared payload handed to every mutation hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "ConversionCustomVariableService.mutateConversionCustomVariables",
mutation: request,
isServiceCall: true,
};
// onMutationStart may cancel the call (returning a substitute result) or
// patch extra options straight onto the outgoing request.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateConversionCustomVariables(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd may replace the resolved value via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode any partial_failure payload embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Surface the failure to onMutationError (if registered), then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description update resources of type resources.IConversionCustomVariable
 * @returns services.MutateConversionCustomVariablesResponse
 */
update: async (
conversionCustomVariables: (resources.IConversionCustomVariable | resources.ConversionCustomVariable)[] ,
options?: MutateOptions
): Promise<services.MutateConversionCustomVariablesResponse > => {
// Wrap each resource in an "update" operation; the static class is passed
// so an update_mask can be derived from the concrete resource type.
const ops = this.buildOperations<
services.ConversionCustomVariableOperation,
resources.IConversionCustomVariable
>(
"update",
conversionCustomVariables
// @ts-expect-error Static class type here is fine
, resources.ConversionCustomVariable
);
const request = this.buildRequest<
services.ConversionCustomVariableOperation,
services.IMutateConversionCustomVariablesRequest,
MutateOptions
>(ops, options);
// Shared payload handed to every mutation hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "ConversionCustomVariableService.mutateConversionCustomVariables",
mutation: request,
isServiceCall: true,
};
// onMutationStart may cancel the call or patch options onto the request.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateConversionCustomVariables(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd may replace the resolved value via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode any partial_failure payload embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Surface the failure to onMutationError (if registered), then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
 * Accessor for the ConversionGoalCampaignConfigService.
 *
 * Lazily loads the gRPC service client. This service supports update only;
 * its request type has no partial_failure field, so (unlike most mutate
 * methods here) the raw response is returned without partial-failure decoding.
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/ConversionGoalCampaignConfigService
 */
public get conversionGoalCampaignConfigs() {
const service = this.loadService<services.ConversionGoalCampaignConfigService>("ConversionGoalCampaignConfigServiceClient")
// Request-level options callers may set; copied verbatim onto the mutate request.
type MutateOptions = Partial<Pick<services.IMutateConversionGoalCampaignConfigsRequest, "validate_only"|"response_content_type">>
return {
/**
 * @description update resources of type resources.IConversionGoalCampaignConfig
 * @returns services.MutateConversionGoalCampaignConfigsResponse
 */
update: async (
conversionGoalCampaignConfigs: (resources.IConversionGoalCampaignConfig | resources.ConversionGoalCampaignConfig)[] ,
options?: MutateOptions
): Promise<services.MutateConversionGoalCampaignConfigsResponse > => {
// Wrap each resource in an "update" operation; the static class is passed
// so an update_mask can be derived from the concrete resource type.
const ops = this.buildOperations<
services.ConversionGoalCampaignConfigOperation,
resources.IConversionGoalCampaignConfig
>(
"update",
conversionGoalCampaignConfigs
// @ts-expect-error Static class type here is fine
, resources.ConversionGoalCampaignConfig
);
const request = this.buildRequest<
services.ConversionGoalCampaignConfigOperation,
services.IMutateConversionGoalCampaignConfigsRequest,
MutateOptions
>(ops, options);
// Shared payload handed to every mutation hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "ConversionGoalCampaignConfigService.mutateConversionGoalCampaignConfigs",
mutation: request,
isServiceCall: true,
};
// onMutationStart may cancel the call (returning a substitute result) or
// patch extra options straight onto the outgoing request.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateConversionGoalCampaignConfigs(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd may replace the resolved value via resolve().
// No partial-failure decoding: this request type does not support it.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response,
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return response;
} catch (err) {
// Surface the failure to onMutationError (if registered), then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
 * Accessor for the ConversionValueRuleService.
 *
 * Lazily loads the gRPC service client and returns a generated
 * get/create/update/remove API for resources.ConversionValueRule.
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/ConversionValueRuleService
 */
public get conversionValueRules() {
const service = this.loadService<services.ConversionValueRuleService>("ConversionValueRuleServiceClient")
// Request-level options callers may set; copied verbatim onto the mutate request.
type MutateOptions = Partial<Pick<services.IMutateConversionValueRulesRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
 * @description Retrieve a resources.ConversionValueRule in full detail
 * @warning Don't use get in production!
 * @returns resources.ConversionValueRule
 */
get: async (resourceName: string): Promise<resources.ConversionValueRule> => {
const request = new services.GetConversionValueRuleRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getConversionValueRule(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise transport/gRPC errors into a GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
 * @description create resources of type resources.IConversionValueRule
 * @returns services.MutateConversionValueRulesResponse
 */
create: async (
conversionValueRules: (resources.IConversionValueRule | resources.ConversionValueRule)[] ,
options?: MutateOptions
): Promise<services.MutateConversionValueRulesResponse > => {
// Wrap each resource in a "create" operation.
const ops = this.buildOperations<
services.ConversionValueRuleOperation,
resources.IConversionValueRule
>(
"create",
conversionValueRules
);
const request = this.buildRequest<
services.ConversionValueRuleOperation,
services.IMutateConversionValueRulesRequest,
MutateOptions
>(ops, options);
// Shared payload handed to every mutation hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "ConversionValueRuleService.mutateConversionValueRules",
mutation: request,
isServiceCall: true,
};
// onMutationStart may cancel the call or patch options onto the request.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateConversionValueRules(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd may replace the resolved value via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode any partial_failure payload embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Surface the failure to onMutationError (if registered), then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description update resources of type resources.IConversionValueRule
 * @returns services.MutateConversionValueRulesResponse
 */
update: async (
conversionValueRules: (resources.IConversionValueRule | resources.ConversionValueRule)[] ,
options?: MutateOptions
): Promise<services.MutateConversionValueRulesResponse > => {
// Wrap each resource in an "update" operation; the static class is passed
// so an update_mask can be derived from the concrete resource type.
const ops = this.buildOperations<
services.ConversionValueRuleOperation,
resources.IConversionValueRule
>(
"update",
conversionValueRules
// @ts-expect-error Static class type here is fine
, resources.ConversionValueRule
);
const request = this.buildRequest<
services.ConversionValueRuleOperation,
services.IMutateConversionValueRulesRequest,
MutateOptions
>(ops, options);
// Shared payload handed to every mutation hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "ConversionValueRuleService.mutateConversionValueRules",
mutation: request,
isServiceCall: true,
};
// onMutationStart may cancel the call or patch options onto the request.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateConversionValueRules(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd may replace the resolved value via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode any partial_failure payload embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Surface the failure to onMutationError (if registered), then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description remove resources of type string
 * @returns services.MutateConversionValueRulesResponse
 */
remove: async (
conversionValueRules: string[] ,
options?: MutateOptions
): Promise<services.MutateConversionValueRulesResponse > => {
// Wrap each resource name in a "remove" operation.
const ops = this.buildOperations<
services.ConversionValueRuleOperation,
string
>(
"remove",
conversionValueRules
);
const request = this.buildRequest<
services.ConversionValueRuleOperation,
services.IMutateConversionValueRulesRequest,
MutateOptions
>(ops, options);
// Shared payload handed to every mutation hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "ConversionValueRuleService.mutateConversionValueRules",
mutation: request,
isServiceCall: true,
};
// onMutationStart may cancel the call or patch options onto the request.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateConversionValueRules(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd may replace the resolved value via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode any partial_failure payload embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Surface the failure to onMutationError (if registered), then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
 * Accessor for the ConversionValueRuleSetService.
 *
 * Lazily loads the gRPC service client and returns a generated
 * get/create/update/remove API for resources.ConversionValueRuleSet.
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/ConversionValueRuleSetService
 */
public get conversionValueRuleSets() {
const service = this.loadService<services.ConversionValueRuleSetService>("ConversionValueRuleSetServiceClient")
// Request-level options callers may set; copied verbatim onto the mutate request.
type MutateOptions = Partial<Pick<services.IMutateConversionValueRuleSetsRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
 * @description Retrieve a resources.ConversionValueRuleSet in full detail
 * @warning Don't use get in production!
 * @returns resources.ConversionValueRuleSet
 */
get: async (resourceName: string): Promise<resources.ConversionValueRuleSet> => {
const request = new services.GetConversionValueRuleSetRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getConversionValueRuleSet(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise transport/gRPC errors into a GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
 * @description create resources of type resources.IConversionValueRuleSet
 * @returns services.MutateConversionValueRuleSetsResponse
 */
create: async (
conversionValueRuleSets: (resources.IConversionValueRuleSet | resources.ConversionValueRuleSet)[] ,
options?: MutateOptions
): Promise<services.MutateConversionValueRuleSetsResponse > => {
// Wrap each resource in a "create" operation.
const ops = this.buildOperations<
services.ConversionValueRuleSetOperation,
resources.IConversionValueRuleSet
>(
"create",
conversionValueRuleSets
);
const request = this.buildRequest<
services.ConversionValueRuleSetOperation,
services.IMutateConversionValueRuleSetsRequest,
MutateOptions
>(ops, options);
// Shared payload handed to every mutation hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "ConversionValueRuleSetService.mutateConversionValueRuleSets",
mutation: request,
isServiceCall: true,
};
// onMutationStart may cancel the call or patch options onto the request.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateConversionValueRuleSets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd may replace the resolved value via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode any partial_failure payload embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Surface the failure to onMutationError (if registered), then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description update resources of type resources.IConversionValueRuleSet
 * @returns services.MutateConversionValueRuleSetsResponse
 */
update: async (
conversionValueRuleSets: (resources.IConversionValueRuleSet | resources.ConversionValueRuleSet)[] ,
options?: MutateOptions
): Promise<services.MutateConversionValueRuleSetsResponse > => {
// Wrap each resource in an "update" operation; the static class is passed
// so an update_mask can be derived from the concrete resource type.
const ops = this.buildOperations<
services.ConversionValueRuleSetOperation,
resources.IConversionValueRuleSet
>(
"update",
conversionValueRuleSets
// @ts-expect-error Static class type here is fine
, resources.ConversionValueRuleSet
);
const request = this.buildRequest<
services.ConversionValueRuleSetOperation,
services.IMutateConversionValueRuleSetsRequest,
MutateOptions
>(ops, options);
// Shared payload handed to every mutation hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "ConversionValueRuleSetService.mutateConversionValueRuleSets",
mutation: request,
isServiceCall: true,
};
// onMutationStart may cancel the call or patch options onto the request.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateConversionValueRuleSets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd may replace the resolved value via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode any partial_failure payload embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Surface the failure to onMutationError (if registered), then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description remove resources of type string
 * @returns services.MutateConversionValueRuleSetsResponse
 */
remove: async (
conversionValueRuleSets: string[] ,
options?: MutateOptions
): Promise<services.MutateConversionValueRuleSetsResponse > => {
// Wrap each resource name in a "remove" operation.
const ops = this.buildOperations<
services.ConversionValueRuleSetOperation,
string
>(
"remove",
conversionValueRuleSets
);
const request = this.buildRequest<
services.ConversionValueRuleSetOperation,
services.IMutateConversionValueRuleSetsRequest,
MutateOptions
>(ops, options);
// Shared payload handed to every mutation hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "ConversionValueRuleSetService.mutateConversionValueRuleSets",
mutation: request,
isServiceCall: true,
};
// onMutationStart may cancel the call or patch options onto the request.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateConversionValueRuleSets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd may replace the resolved value via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode any partial_failure payload embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Surface the failure to onMutationError (if registered), then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CustomConversionGoalService
*/
public get customConversionGoals() {
const service = this.loadService<services.CustomConversionGoalService>("CustomConversionGoalServiceClient")
type MutateOptions = Partial<Pick<services.IMutateCustomConversionGoalsRequest, "validate_only"|"response_content_type">>
return {
/**
* @description create resources of type resources.ICustomConversionGoal
* @returns services.MutateCustomConversionGoalsResponse
*/
create: async (
customConversionGoals: (resources.ICustomConversionGoal | resources.CustomConversionGoal)[] ,
options?: MutateOptions
): Promise<services.MutateCustomConversionGoalsResponse > => {
const ops = this.buildOperations<
services.CustomConversionGoalOperation,
resources.ICustomConversionGoal
>(
"create",
customConversionGoals
);
const request = this.buildRequest<
services.CustomConversionGoalOperation,
services.IMutateCustomConversionGoalsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CustomConversionGoalService.mutateCustomConversionGoals",
mutation: request,
isServiceCall: true,
};
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCustomConversionGoals(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response,
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return response;
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description update resources of type resources.ICustomConversionGoal
 * @param customConversionGoals - goals to update, as plain objects or class instances
 * @param options - optional request flags (see the accessor's MutateOptions)
 * @returns services.MutateCustomConversionGoalsResponse
 */
update: async (
  customConversionGoals: (resources.ICustomConversionGoal | resources.CustomConversionGoal)[] ,
  options?: MutateOptions
): Promise<services.MutateCustomConversionGoalsResponse > => {
  // Build one "update" operation per resource.
  const ops = this.buildOperations<
    services.CustomConversionGoalOperation,
    resources.ICustomConversionGoal
  >(
    "update",
    customConversionGoals
    // @ts-expect-error Static class type here is fine
    , resources.CustomConversionGoal
  );
  // Wrap the operations in a mutate request, merging any caller options.
  const request = this.buildRequest<
    services.CustomConversionGoalOperation,
    services.IMutateCustomConversionGoalsRequest,
    MutateOptions
  >(ops, options);
  // Common arguments passed to every mutation hook below.
  const baseHookArguments: BaseMutationHookArgs = {
    credentials: this.credentials,
    method: "CustomConversionGoalService.mutateCustomConversionGoals",
    mutation: request,
    isServiceCall: true,
  };
  // onMutationStart may cancel the RPC (returning its own value) or
  // edit request options in place before the call is issued.
  if (this.hooks.onMutationStart) {
    const mutationCancellation: HookedCancellation = { cancelled: false };
    await this.hooks.onMutationStart({
      ...baseHookArguments,
      cancel: (res) => {
        mutationCancellation.cancelled = true;
        mutationCancellation.res = res;
      },
      editOptions: (options) => {
        Object.entries(options).forEach(([key, val]) => {
          // @ts-expect-error Index with key type is fine
          request[key] = val;
        });
      },
    });
    if (mutationCancellation.cancelled) {
      return mutationCancellation.res;
    }
  }
  try {
    // @ts-expect-error Response is an array type
    const [response] = await service.mutateCustomConversionGoals(request, {
      // @ts-expect-error This arg doesn't exist in the type definitions
      otherArgs: {
        headers: this.callHeaders,
      },
    });
    // onMutationEnd may substitute its own resolution for the response.
    if (this.hooks.onMutationEnd) {
      const mutationResolution: HookedResolution = { resolved: false };
      await this.hooks.onMutationEnd({
        ...baseHookArguments,
        response,
        resolve: (res) => {
          mutationResolution.resolved = true;
          mutationResolution.res = res;
        },
      });
      if (mutationResolution.resolved) {
        return mutationResolution.res;
      }
    }
    return response;
  } catch (err) {
    // Normalize the raw error, notify onMutationError, then rethrow.
    const googleAdsError = this.getGoogleAdsError(err);
    if (this.hooks.onMutationError) {
      await this.hooks.onMutationError({
        ...baseHookArguments,
        error: googleAdsError,
      });
    }
    throw googleAdsError;
  }
}
,
/**
 * @description remove resources of type string
 * @param customConversionGoals - resource names of the goals to remove
 * @param options - optional request flags (see the accessor's MutateOptions)
 * @returns services.MutateCustomConversionGoalsResponse
 */
remove: async (
  customConversionGoals: string[] ,
  options?: MutateOptions
): Promise<services.MutateCustomConversionGoalsResponse > => {
  // Build one "remove" operation per resource name.
  const ops = this.buildOperations<
    services.CustomConversionGoalOperation,
    string
  >(
    "remove",
    customConversionGoals
  );
  // Wrap the operations in a mutate request, merging any caller options.
  const request = this.buildRequest<
    services.CustomConversionGoalOperation,
    services.IMutateCustomConversionGoalsRequest,
    MutateOptions
  >(ops, options);
  // Common arguments passed to every mutation hook below.
  const baseHookArguments: BaseMutationHookArgs = {
    credentials: this.credentials,
    method: "CustomConversionGoalService.mutateCustomConversionGoals",
    mutation: request,
    isServiceCall: true,
  };
  // onMutationStart may cancel the RPC (returning its own value) or
  // edit request options in place before the call is issued.
  if (this.hooks.onMutationStart) {
    const mutationCancellation: HookedCancellation = { cancelled: false };
    await this.hooks.onMutationStart({
      ...baseHookArguments,
      cancel: (res) => {
        mutationCancellation.cancelled = true;
        mutationCancellation.res = res;
      },
      editOptions: (options) => {
        Object.entries(options).forEach(([key, val]) => {
          // @ts-expect-error Index with key type is fine
          request[key] = val;
        });
      },
    });
    if (mutationCancellation.cancelled) {
      return mutationCancellation.res;
    }
  }
  try {
    // @ts-expect-error Response is an array type
    const [response] = await service.mutateCustomConversionGoals(request, {
      // @ts-expect-error This arg doesn't exist in the type definitions
      otherArgs: {
        headers: this.callHeaders,
      },
    });
    // onMutationEnd may substitute its own resolution for the response.
    if (this.hooks.onMutationEnd) {
      const mutationResolution: HookedResolution = { resolved: false };
      await this.hooks.onMutationEnd({
        ...baseHookArguments,
        response,
        resolve: (res) => {
          mutationResolution.resolved = true;
          mutationResolution.res = res;
        },
      });
      if (mutationResolution.resolved) {
        return mutationResolution.res;
      }
    }
    return response;
  } catch (err) {
    // Normalize the raw error, notify onMutationError, then rethrow.
    const googleAdsError = this.getGoogleAdsError(err);
    if (this.hooks.onMutationError) {
      await this.hooks.onMutationError({
        ...baseHookArguments,
        error: googleAdsError,
      });
    }
    throw googleAdsError;
  }
}
}
}
/**
 * Accessor for CustomerAsset operations (get/create/update/remove), each
 * routed through the configured mutation hooks.
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/CustomerAssetService
 */
public get customerAssets() {
  const service = this.loadService<services.CustomerAssetService>("CustomerAssetServiceClient")
  // Request flags callers may set on any mutate call from this accessor.
  type MutateOptions = Partial<Pick<services.IMutateCustomerAssetsRequest, "partial_failure"|"validate_only"|"response_content_type">>
  return {
    /**
     * @description Retrieve a resources.CustomerAsset in full detail
     * @warning Don't use get in production!
     * @param resourceName - resource name of the asset to fetch
     * @returns resources.CustomerAsset
     */
    get: async (resourceName: string): Promise<resources.CustomerAsset> => {
      const request = new services.GetCustomerAssetRequest({
        resource_name: resourceName,
      });
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.getCustomerAsset(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        return response;
      } catch (err) {
        // Normalize the raw error before rethrowing.
        throw this.getGoogleAdsError(err);
      }
    }
    ,
    /**
     * @description create resources of type resources.ICustomerAsset
     * @param customerAssets - assets to create, as plain objects or class instances
     * @param options - optional request flags (see MutateOptions)
     * @returns services.MutateCustomerAssetsResponse
     */
    create: async (
      customerAssets: (resources.ICustomerAsset | resources.CustomerAsset)[] ,
      options?: MutateOptions
    ): Promise<services.MutateCustomerAssetsResponse > => {
      // Build one "create" operation per resource.
      const ops = this.buildOperations<
        services.CustomerAssetOperation,
        resources.ICustomerAsset
      >(
        "create",
        customerAssets
      );
      const request = this.buildRequest<
        services.CustomerAssetOperation,
        services.IMutateCustomerAssetsRequest,
        MutateOptions
      >(ops, options);
      // Common arguments passed to every mutation hook below.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "CustomerAssetService.mutateCustomerAssets",
        mutation: request,
        isServiceCall: true,
      };
      // onMutationStart may cancel the RPC or edit request options in place.
      if (this.hooks.onMutationStart) {
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          return mutationCancellation.res;
        }
      }
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateCustomerAssets(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        // onMutationEnd may substitute its own resolution; the response is
        // passed through decodePartialFailureError (partial_failure support).
        if (this.hooks.onMutationEnd) {
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalize the raw error, notify onMutationError, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
    ,
    /**
     * @description update resources of type resources.ICustomerAsset
     * @param customerAssets - assets to update, as plain objects or class instances
     * @param options - optional request flags (see MutateOptions)
     * @returns services.MutateCustomerAssetsResponse
     */
    update: async (
      customerAssets: (resources.ICustomerAsset | resources.CustomerAsset)[] ,
      options?: MutateOptions
    ): Promise<services.MutateCustomerAssetsResponse > => {
      // Build one "update" operation per resource.
      const ops = this.buildOperations<
        services.CustomerAssetOperation,
        resources.ICustomerAsset
      >(
        "update",
        customerAssets
        // @ts-expect-error Static class type here is fine
        , resources.CustomerAsset
      );
      const request = this.buildRequest<
        services.CustomerAssetOperation,
        services.IMutateCustomerAssetsRequest,
        MutateOptions
      >(ops, options);
      // Common arguments passed to every mutation hook below.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "CustomerAssetService.mutateCustomerAssets",
        mutation: request,
        isServiceCall: true,
      };
      // onMutationStart may cancel the RPC or edit request options in place.
      if (this.hooks.onMutationStart) {
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          return mutationCancellation.res;
        }
      }
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateCustomerAssets(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        // onMutationEnd may substitute its own resolution; the response is
        // passed through decodePartialFailureError (partial_failure support).
        if (this.hooks.onMutationEnd) {
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalize the raw error, notify onMutationError, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
    ,
    /**
     * @description remove resources of type string
     * @param customerAssets - resource names of the assets to remove
     * @param options - optional request flags (see MutateOptions)
     * @returns services.MutateCustomerAssetsResponse
     */
    remove: async (
      customerAssets: string[] ,
      options?: MutateOptions
    ): Promise<services.MutateCustomerAssetsResponse > => {
      // Build one "remove" operation per resource name.
      const ops = this.buildOperations<
        services.CustomerAssetOperation,
        string
      >(
        "remove",
        customerAssets
      );
      const request = this.buildRequest<
        services.CustomerAssetOperation,
        services.IMutateCustomerAssetsRequest,
        MutateOptions
      >(ops, options);
      // Common arguments passed to every mutation hook below.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "CustomerAssetService.mutateCustomerAssets",
        mutation: request,
        isServiceCall: true,
      };
      // onMutationStart may cancel the RPC or edit request options in place.
      if (this.hooks.onMutationStart) {
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          return mutationCancellation.res;
        }
      }
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateCustomerAssets(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        // onMutationEnd may substitute its own resolution; the response is
        // passed through decodePartialFailureError (partial_failure support).
        if (this.hooks.onMutationEnd) {
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalize the raw error, notify onMutationError, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
  }
}
/**
 * Accessor for CustomerConversionGoal operations. Only an `update` mutation
 * is exposed; it runs through the configured mutation hooks.
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/CustomerConversionGoalService
 */
public get customerConversionGoals() {
  const goalService = this.loadService<services.CustomerConversionGoalService>("CustomerConversionGoalServiceClient")
  // Request flags callers may set on the mutate call.
  type MutateOptions = Partial<Pick<services.IMutateCustomerConversionGoalsRequest, "validate_only">>
  return {
    /**
     * @description update resources of type resources.ICustomerConversionGoal
     * @param customerConversionGoals - goals to update, as plain objects or class instances
     * @param options - optional request flags (see MutateOptions)
     * @returns services.MutateCustomerConversionGoalsResponse
     */
    update: async (
      customerConversionGoals: (resources.ICustomerConversionGoal | resources.CustomerConversionGoal)[] ,
      options?: MutateOptions
    ): Promise<services.MutateCustomerConversionGoalsResponse > => {
      // One "update" operation per goal, wrapped into a mutate request.
      const operations = this.buildOperations<
        services.CustomerConversionGoalOperation,
        resources.ICustomerConversionGoal
      >(
        "update",
        customerConversionGoals,
        // @ts-expect-error Static class type here is fine
        resources.CustomerConversionGoal
      );
      const mutateRequest = this.buildRequest<
        services.CustomerConversionGoalOperation,
        services.IMutateCustomerConversionGoalsRequest,
        MutateOptions
      >(operations, options);
      // Shared arguments for every mutation hook invocation below.
      const hookArgs: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "CustomerConversionGoalService.mutateCustomerConversionGoals",
        mutation: mutateRequest,
        isServiceCall: true,
      };
      // Let onMutationStart cancel the call or patch request options in place.
      if (this.hooks.onMutationStart) {
        const cancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...hookArgs,
          cancel: (res) => {
            cancellation.cancelled = true;
            cancellation.res = res;
          },
          editOptions: (opts) => {
            for (const [key, val] of Object.entries(opts)) {
              // @ts-expect-error Index with key type is fine
              mutateRequest[key] = val;
            }
          },
        });
        if (cancellation.cancelled) return cancellation.res;
      }
      try {
        // @ts-expect-error Response is an array type
        const [response] = await goalService.mutateCustomerConversionGoals(mutateRequest, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        // Let onMutationEnd substitute its own resolution for the response.
        if (this.hooks.onMutationEnd) {
          const resolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...hookArgs,
            response,
            resolve: (res) => {
              resolution.resolved = true;
              resolution.res = res;
            },
          });
          if (resolution.resolved) return resolution.res;
        }
        return response;
      } catch (err) {
        // Normalize the raw error, notify onMutationError, then rethrow.
        const adsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...hookArgs,
            error: adsError,
          });
        }
        throw adsError;
      }
    }
  }
}
/**
 * Accessor for CustomerCustomizer operations (create/remove), each routed
 * through the configured mutation hooks.
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/CustomerCustomizerService
 */
public get customerCustomizers() {
  const service = this.loadService<services.CustomerCustomizerService>("CustomerCustomizerServiceClient")
  // Request flags callers may set on any mutate call from this accessor.
  type MutateOptions = Partial<Pick<services.IMutateCustomerCustomizersRequest, "partial_failure"|"validate_only"|"response_content_type">>
  return {
    /**
     * @description create resources of type resources.ICustomerCustomizer
     * @param customerCustomizers - customizers to create, as plain objects or class instances
     * @param options - optional request flags (see MutateOptions)
     * @returns services.MutateCustomerCustomizersResponse
     */
    create: async (
      customerCustomizers: (resources.ICustomerCustomizer | resources.CustomerCustomizer)[] ,
      options?: MutateOptions
    ): Promise<services.MutateCustomerCustomizersResponse > => {
      // Build one "create" operation per resource.
      const ops = this.buildOperations<
        services.CustomerCustomizerOperation,
        resources.ICustomerCustomizer
      >(
        "create",
        customerCustomizers
      );
      const request = this.buildRequest<
        services.CustomerCustomizerOperation,
        services.IMutateCustomerCustomizersRequest,
        MutateOptions
      >(ops, options);
      // Common arguments passed to every mutation hook below.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "CustomerCustomizerService.mutateCustomerCustomizers",
        mutation: request,
        isServiceCall: true,
      };
      // onMutationStart may cancel the RPC or edit request options in place.
      if (this.hooks.onMutationStart) {
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          return mutationCancellation.res;
        }
      }
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateCustomerCustomizers(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        // onMutationEnd may substitute its own resolution; the response is
        // passed through decodePartialFailureError (partial_failure support).
        if (this.hooks.onMutationEnd) {
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalize the raw error, notify onMutationError, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
    ,
    /**
     * @description remove resources of type string
     * @param customerCustomizers - resource names of the customizers to remove
     * @param options - optional request flags (see MutateOptions)
     * @returns services.MutateCustomerCustomizersResponse
     */
    remove: async (
      customerCustomizers: string[] ,
      options?: MutateOptions
    ): Promise<services.MutateCustomerCustomizersResponse > => {
      // Build one "remove" operation per resource name.
      const ops = this.buildOperations<
        services.CustomerCustomizerOperation,
        string
      >(
        "remove",
        customerCustomizers
      );
      const request = this.buildRequest<
        services.CustomerCustomizerOperation,
        services.IMutateCustomerCustomizersRequest,
        MutateOptions
      >(ops, options);
      // Common arguments passed to every mutation hook below.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "CustomerCustomizerService.mutateCustomerCustomizers",
        mutation: request,
        isServiceCall: true,
      };
      // onMutationStart may cancel the RPC or edit request options in place.
      if (this.hooks.onMutationStart) {
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          return mutationCancellation.res;
        }
      }
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateCustomerCustomizers(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        // onMutationEnd may substitute its own resolution; the response is
        // passed through decodePartialFailureError (partial_failure support).
        if (this.hooks.onMutationEnd) {
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalize the raw error, notify onMutationError, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
  }
}
/**
 * Accessor for CustomerExtensionSetting operations (get/create/update/remove),
 * each routed through the configured mutation hooks.
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/CustomerExtensionSettingService
 */
public get customerExtensionSettings() {
  const service = this.loadService<services.CustomerExtensionSettingService>("CustomerExtensionSettingServiceClient")
  // Request flags callers may set on any mutate call from this accessor.
  type MutateOptions = Partial<Pick<services.IMutateCustomerExtensionSettingsRequest, "partial_failure"|"validate_only"|"response_content_type">>
  return {
    /**
     * @description Retrieve a resources.CustomerExtensionSetting in full detail
     * @warning Don't use get in production!
     * @param resourceName - resource name of the setting to fetch
     * @returns resources.CustomerExtensionSetting
     */
    get: async (resourceName: string): Promise<resources.CustomerExtensionSetting> => {
      const request = new services.GetCustomerExtensionSettingRequest({
        resource_name: resourceName,
      });
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.getCustomerExtensionSetting(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        return response;
      } catch (err) {
        // Normalize the raw error before rethrowing.
        throw this.getGoogleAdsError(err);
      }
    }
    ,
    /**
     * @description create resources of type resources.ICustomerExtensionSetting
     * @param customerExtensionSettings - settings to create, as plain objects or class instances
     * @param options - optional request flags (see MutateOptions)
     * @returns services.MutateCustomerExtensionSettingsResponse
     */
    create: async (
      customerExtensionSettings: (resources.ICustomerExtensionSetting | resources.CustomerExtensionSetting)[] ,
      options?: MutateOptions
    ): Promise<services.MutateCustomerExtensionSettingsResponse > => {
      // Build one "create" operation per resource.
      const ops = this.buildOperations<
        services.CustomerExtensionSettingOperation,
        resources.ICustomerExtensionSetting
      >(
        "create",
        customerExtensionSettings
      );
      const request = this.buildRequest<
        services.CustomerExtensionSettingOperation,
        services.IMutateCustomerExtensionSettingsRequest,
        MutateOptions
      >(ops, options);
      // Common arguments passed to every mutation hook below.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "CustomerExtensionSettingService.mutateCustomerExtensionSettings",
        mutation: request,
        isServiceCall: true,
      };
      // onMutationStart may cancel the RPC or edit request options in place.
      if (this.hooks.onMutationStart) {
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          return mutationCancellation.res;
        }
      }
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateCustomerExtensionSettings(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        // onMutationEnd may substitute its own resolution; the response is
        // passed through decodePartialFailureError (partial_failure support).
        if (this.hooks.onMutationEnd) {
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalize the raw error, notify onMutationError, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
    ,
    /**
     * @description update resources of type resources.ICustomerExtensionSetting
     * @param customerExtensionSettings - settings to update, as plain objects or class instances
     * @param options - optional request flags (see MutateOptions)
     * @returns services.MutateCustomerExtensionSettingsResponse
     */
    update: async (
      customerExtensionSettings: (resources.ICustomerExtensionSetting | resources.CustomerExtensionSetting)[] ,
      options?: MutateOptions
    ): Promise<services.MutateCustomerExtensionSettingsResponse > => {
      // Build one "update" operation per resource.
      const ops = this.buildOperations<
        services.CustomerExtensionSettingOperation,
        resources.ICustomerExtensionSetting
      >(
        "update",
        customerExtensionSettings
        // @ts-expect-error Static class type here is fine
        , resources.CustomerExtensionSetting
      );
      const request = this.buildRequest<
        services.CustomerExtensionSettingOperation,
        services.IMutateCustomerExtensionSettingsRequest,
        MutateOptions
      >(ops, options);
      // Common arguments passed to every mutation hook below.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "CustomerExtensionSettingService.mutateCustomerExtensionSettings",
        mutation: request,
        isServiceCall: true,
      };
      // onMutationStart may cancel the RPC or edit request options in place.
      if (this.hooks.onMutationStart) {
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          return mutationCancellation.res;
        }
      }
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateCustomerExtensionSettings(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        // onMutationEnd may substitute its own resolution; the response is
        // passed through decodePartialFailureError (partial_failure support).
        if (this.hooks.onMutationEnd) {
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalize the raw error, notify onMutationError, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
    ,
    /**
     * @description remove resources of type string
     * @param customerExtensionSettings - resource names of the settings to remove
     * @param options - optional request flags (see MutateOptions)
     * @returns services.MutateCustomerExtensionSettingsResponse
     */
    remove: async (
      customerExtensionSettings: string[] ,
      options?: MutateOptions
    ): Promise<services.MutateCustomerExtensionSettingsResponse > => {
      // Build one "remove" operation per resource name.
      const ops = this.buildOperations<
        services.CustomerExtensionSettingOperation,
        string
      >(
        "remove",
        customerExtensionSettings
      );
      const request = this.buildRequest<
        services.CustomerExtensionSettingOperation,
        services.IMutateCustomerExtensionSettingsRequest,
        MutateOptions
      >(ops, options);
      // Common arguments passed to every mutation hook below.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "CustomerExtensionSettingService.mutateCustomerExtensionSettings",
        mutation: request,
        isServiceCall: true,
      };
      // onMutationStart may cancel the RPC or edit request options in place.
      if (this.hooks.onMutationStart) {
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          return mutationCancellation.res;
        }
      }
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateCustomerExtensionSettings(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        // onMutationEnd may substitute its own resolution; the response is
        // passed through decodePartialFailureError (partial_failure support).
        if (this.hooks.onMutationEnd) {
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalize the raw error, notify onMutationError, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
  }
}
/**
 * Accessor for CustomerFeed operations (get/create/update/remove), each
 * routed through the configured mutation hooks.
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/CustomerFeedService
 */
public get customerFeeds() {
  const service = this.loadService<services.CustomerFeedService>("CustomerFeedServiceClient")
  // Request flags callers may set on any mutate call from this accessor.
  type MutateOptions = Partial<Pick<services.IMutateCustomerFeedsRequest, "partial_failure"|"validate_only"|"response_content_type">>
  return {
    /**
     * @description Retrieve a resources.CustomerFeed in full detail
     * @warning Don't use get in production!
     * @param resourceName - resource name of the feed to fetch
     * @returns resources.CustomerFeed
     */
    get: async (resourceName: string): Promise<resources.CustomerFeed> => {
      const request = new services.GetCustomerFeedRequest({
        resource_name: resourceName,
      });
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.getCustomerFeed(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        return response;
      } catch (err) {
        // Normalize the raw error before rethrowing.
        throw this.getGoogleAdsError(err);
      }
    }
    ,
    /**
     * @description create resources of type resources.ICustomerFeed
     * @param customerFeeds - feeds to create, as plain objects or class instances
     * @param options - optional request flags (see MutateOptions)
     * @returns services.MutateCustomerFeedsResponse
     */
    create: async (
      customerFeeds: (resources.ICustomerFeed | resources.CustomerFeed)[] ,
      options?: MutateOptions
    ): Promise<services.MutateCustomerFeedsResponse > => {
      // Build one "create" operation per resource.
      const ops = this.buildOperations<
        services.CustomerFeedOperation,
        resources.ICustomerFeed
      >(
        "create",
        customerFeeds
      );
      const request = this.buildRequest<
        services.CustomerFeedOperation,
        services.IMutateCustomerFeedsRequest,
        MutateOptions
      >(ops, options);
      // Common arguments passed to every mutation hook below.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "CustomerFeedService.mutateCustomerFeeds",
        mutation: request,
        isServiceCall: true,
      };
      // onMutationStart may cancel the RPC or edit request options in place.
      if (this.hooks.onMutationStart) {
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          return mutationCancellation.res;
        }
      }
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateCustomerFeeds(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        // onMutationEnd may substitute its own resolution; the response is
        // passed through decodePartialFailureError (partial_failure support).
        if (this.hooks.onMutationEnd) {
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalize the raw error, notify onMutationError, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
    ,
    /**
     * @description update resources of type resources.ICustomerFeed
     * @param customerFeeds - feeds to update, as plain objects or class instances
     * @param options - optional request flags (see MutateOptions)
     * @returns services.MutateCustomerFeedsResponse
     */
    update: async (
      customerFeeds: (resources.ICustomerFeed | resources.CustomerFeed)[] ,
      options?: MutateOptions
    ): Promise<services.MutateCustomerFeedsResponse > => {
      // Build one "update" operation per resource.
      const ops = this.buildOperations<
        services.CustomerFeedOperation,
        resources.ICustomerFeed
      >(
        "update",
        customerFeeds
        // @ts-expect-error Static class type here is fine
        , resources.CustomerFeed
      );
      const request = this.buildRequest<
        services.CustomerFeedOperation,
        services.IMutateCustomerFeedsRequest,
        MutateOptions
      >(ops, options);
      // Common arguments passed to every mutation hook below.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "CustomerFeedService.mutateCustomerFeeds",
        mutation: request,
        isServiceCall: true,
      };
      // onMutationStart may cancel the RPC or edit request options in place.
      if (this.hooks.onMutationStart) {
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          return mutationCancellation.res;
        }
      }
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateCustomerFeeds(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        // onMutationEnd may substitute its own resolution; the response is
        // passed through decodePartialFailureError (partial_failure support).
        if (this.hooks.onMutationEnd) {
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalize the raw error, notify onMutationError, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
    ,
    /**
     * @description remove resources of type string
     * @param customerFeeds - resource names of the feeds to remove
     * @param options - optional request flags (see MutateOptions)
     * @returns services.MutateCustomerFeedsResponse
     */
    remove: async (
      customerFeeds: string[] ,
      options?: MutateOptions
    ): Promise<services.MutateCustomerFeedsResponse > => {
      // Build one "remove" operation per resource name.
      const ops = this.buildOperations<
        services.CustomerFeedOperation,
        string
      >(
        "remove",
        customerFeeds
      );
      const request = this.buildRequest<
        services.CustomerFeedOperation,
        services.IMutateCustomerFeedsRequest,
        MutateOptions
      >(ops, options);
      // Common arguments passed to every mutation hook below.
      const baseHookArguments: BaseMutationHookArgs = {
        credentials: this.credentials,
        method: "CustomerFeedService.mutateCustomerFeeds",
        mutation: request,
        isServiceCall: true,
      };
      // onMutationStart may cancel the RPC or edit request options in place.
      if (this.hooks.onMutationStart) {
        const mutationCancellation: HookedCancellation = { cancelled: false };
        await this.hooks.onMutationStart({
          ...baseHookArguments,
          cancel: (res) => {
            mutationCancellation.cancelled = true;
            mutationCancellation.res = res;
          },
          editOptions: (options) => {
            Object.entries(options).forEach(([key, val]) => {
              // @ts-expect-error Index with key type is fine
              request[key] = val;
            });
          },
        });
        if (mutationCancellation.cancelled) {
          return mutationCancellation.res;
        }
      }
      try {
        // @ts-expect-error Response is an array type
        const [response] = await service.mutateCustomerFeeds(request, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        // onMutationEnd may substitute its own resolution; the response is
        // passed through decodePartialFailureError (partial_failure support).
        if (this.hooks.onMutationEnd) {
          const mutationResolution: HookedResolution = { resolved: false };
          await this.hooks.onMutationEnd({
            ...baseHookArguments,
            response: this.decodePartialFailureError(response),
            resolve: (res) => {
              mutationResolution.resolved = true;
              mutationResolution.res = res;
            },
          });
          if (mutationResolution.resolved) {
            return mutationResolution.res;
          }
        }
        return this.decodePartialFailureError(response);
      } catch (err) {
        // Normalize the raw error, notify onMutationError, then rethrow.
        const googleAdsError = this.getGoogleAdsError(err);
        if (this.hooks.onMutationError) {
          await this.hooks.onMutationError({
            ...baseHookArguments,
            error: googleAdsError,
          });
        }
        throw googleAdsError;
      }
    }
  }
}
  /**
   * Accessor for CustomerLabelService.
   *
   * Returns an object exposing `get`, `create` and `remove` operations for
   * CustomerLabel resources. Every mutation is routed through the optional
   * onMutationStart / onMutationEnd / onMutationError hooks configured on
   * this client.
   *
   * @link https://developers.google.com/google-ads/api/reference/rpc/v9/CustomerLabelService
   */
  public get customerLabels() {
    // Lazily load the gRPC client for this service.
    const service = this.loadService<services.CustomerLabelService>("CustomerLabelServiceClient")
    // Options callers may set on mutate requests for this service.
    type MutateOptions = Partial<Pick<services.IMutateCustomerLabelsRequest, "partial_failure"|"validate_only">>
    return {
      /**
       * @description Retrieve a resources.CustomerLabel in full detail
       * @warning Don't use get in production!
       * @returns resources.CustomerLabel
       */
      get: async (resourceName: string): Promise<resources.CustomerLabel> => {
        const request = new services.GetCustomerLabelRequest({
          resource_name: resourceName,
        });
        try {
          // @ts-expect-error Response is an array type
          const [response] = await service.getCustomerLabel(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          return response;
        } catch (err) {
          // Normalize the raw gRPC error into a GoogleAdsError before rethrowing.
          throw this.getGoogleAdsError(err);
        }
      }
      ,
      /**
       * @description create resources of type resources.ICustomerLabel
       * @returns services.MutateCustomerLabelsResponse
       */
      create: async (
        customerLabels: (resources.ICustomerLabel | resources.CustomerLabel)[] ,
        options?: MutateOptions
      ): Promise<services.MutateCustomerLabelsResponse > => {
        // Wrap each resource in a "create" operation.
        const ops = this.buildOperations<
          services.CustomerLabelOperation,
          resources.ICustomerLabel
        >(
          "create",
          customerLabels
        );
        // Assemble the mutate request, merging in any caller-supplied options.
        const request = this.buildRequest<
          services.CustomerLabelOperation,
          services.IMutateCustomerLabelsRequest,
          MutateOptions
        >(ops, options);
        const baseHookArguments: BaseMutationHookArgs = {
          credentials: this.credentials,
          method: "CustomerLabelService.mutateCustomerLabels",
          mutation: request,
          isServiceCall: true,
        };
        // Give the onMutationStart hook a chance to edit request options
        // or cancel the call with a substitute result.
        if (this.hooks.onMutationStart) {
          const mutationCancellation: HookedCancellation = { cancelled: false };
          await this.hooks.onMutationStart({
            ...baseHookArguments,
            cancel: (res) => {
              mutationCancellation.cancelled = true;
              mutationCancellation.res = res;
            },
            editOptions: (options) => {
              Object.entries(options).forEach(([key, val]) => {
                // @ts-expect-error Index with key type is fine
                request[key] = val;
              });
            },
          });
          if (mutationCancellation.cancelled) {
            // Hook cancelled the mutation; return its supplied result instead.
            return mutationCancellation.res;
          }
        }
        try {
          // @ts-expect-error Response is an array type
          const [response] = await service.mutateCustomerLabels(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          // Let the onMutationEnd hook inspect the (partial-failure decoded)
          // response and optionally substitute its own resolution.
          if (this.hooks.onMutationEnd) {
            const mutationResolution: HookedResolution = { resolved: false };
            await this.hooks.onMutationEnd({
              ...baseHookArguments,
              response: this.decodePartialFailureError(response),
              resolve: (res) => {
                mutationResolution.resolved = true;
                mutationResolution.res = res;
              },
            });
            if (mutationResolution.resolved) {
              return mutationResolution.res;
            }
          }
          // Decode any partial_failure error embedded in the response.
          return this.decodePartialFailureError(response);
        } catch (err) {
          const googleAdsError = this.getGoogleAdsError(err);
          // Report the normalized error to the onMutationError hook, then rethrow.
          if (this.hooks.onMutationError) {
            await this.hooks.onMutationError({
              ...baseHookArguments,
              error: googleAdsError,
            });
          }
          throw googleAdsError;
        }
      }
      ,
      /**
       * @description remove resources of type string
       * @returns services.MutateCustomerLabelsResponse
       */
      remove: async (
        customerLabels: string[] ,
        options?: MutateOptions
      ): Promise<services.MutateCustomerLabelsResponse > => {
        // Wrap each resource name in a "remove" operation.
        const ops = this.buildOperations<
          services.CustomerLabelOperation,
          string
        >(
          "remove",
          customerLabels
        );
        const request = this.buildRequest<
          services.CustomerLabelOperation,
          services.IMutateCustomerLabelsRequest,
          MutateOptions
        >(ops, options);
        const baseHookArguments: BaseMutationHookArgs = {
          credentials: this.credentials,
          method: "CustomerLabelService.mutateCustomerLabels",
          mutation: request,
          isServiceCall: true,
        };
        // Same hook flow as create: allow edit of options or cancellation.
        if (this.hooks.onMutationStart) {
          const mutationCancellation: HookedCancellation = { cancelled: false };
          await this.hooks.onMutationStart({
            ...baseHookArguments,
            cancel: (res) => {
              mutationCancellation.cancelled = true;
              mutationCancellation.res = res;
            },
            editOptions: (options) => {
              Object.entries(options).forEach(([key, val]) => {
                // @ts-expect-error Index with key type is fine
                request[key] = val;
              });
            },
          });
          if (mutationCancellation.cancelled) {
            return mutationCancellation.res;
          }
        }
        try {
          // @ts-expect-error Response is an array type
          const [response] = await service.mutateCustomerLabels(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          if (this.hooks.onMutationEnd) {
            const mutationResolution: HookedResolution = { resolved: false };
            await this.hooks.onMutationEnd({
              ...baseHookArguments,
              response: this.decodePartialFailureError(response),
              resolve: (res) => {
                mutationResolution.resolved = true;
                mutationResolution.res = res;
              },
            });
            if (mutationResolution.resolved) {
              return mutationResolution.res;
            }
          }
          return this.decodePartialFailureError(response);
        } catch (err) {
          const googleAdsError = this.getGoogleAdsError(err);
          if (this.hooks.onMutationError) {
            await this.hooks.onMutationError({
              ...baseHookArguments,
              error: googleAdsError,
            });
          }
          throw googleAdsError;
        }
      }
    }
  }
  /**
   * Accessor for CustomerNegativeCriterionService.
   *
   * Returns an object exposing `get`, `create` and `remove` operations for
   * CustomerNegativeCriterion resources, with full mutation-hook support.
   *
   * @link https://developers.google.com/google-ads/api/reference/rpc/v9/CustomerNegativeCriterionService
   */
  public get customerNegativeCriteria() {
    // Lazily load the gRPC client for this service.
    const service = this.loadService<services.CustomerNegativeCriterionService>("CustomerNegativeCriterionServiceClient")
    // Options callers may set on mutate requests for this service.
    type MutateOptions = Partial<Pick<services.IMutateCustomerNegativeCriteriaRequest, "partial_failure"|"validate_only"|"response_content_type">>
    return {
      /**
       * @description Retrieve a resources.CustomerNegativeCriterion in full detail
       * @warning Don't use get in production!
       * @returns resources.CustomerNegativeCriterion
       */
      get: async (resourceName: string): Promise<resources.CustomerNegativeCriterion> => {
        const request = new services.GetCustomerNegativeCriterionRequest({
          resource_name: resourceName,
        });
        try {
          // @ts-expect-error Response is an array type
          const [response] = await service.getCustomerNegativeCriterion(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          return response;
        } catch (err) {
          // Normalize the raw gRPC error into a GoogleAdsError before rethrowing.
          throw this.getGoogleAdsError(err);
        }
      }
      ,
      /**
       * @description create resources of type resources.ICustomerNegativeCriterion
       * @returns services.MutateCustomerNegativeCriteriaResponse
       */
      create: async (
        customerNegativeCriteria: (resources.ICustomerNegativeCriterion | resources.CustomerNegativeCriterion)[] ,
        options?: MutateOptions
      ): Promise<services.MutateCustomerNegativeCriteriaResponse > => {
        // Wrap each resource in a "create" operation.
        const ops = this.buildOperations<
          services.CustomerNegativeCriterionOperation,
          resources.ICustomerNegativeCriterion
        >(
          "create",
          customerNegativeCriteria
        );
        // Assemble the mutate request, merging in any caller-supplied options.
        const request = this.buildRequest<
          services.CustomerNegativeCriterionOperation,
          services.IMutateCustomerNegativeCriteriaRequest,
          MutateOptions
        >(ops, options);
        const baseHookArguments: BaseMutationHookArgs = {
          credentials: this.credentials,
          method: "CustomerNegativeCriterionService.mutateCustomerNegativeCriteria",
          mutation: request,
          isServiceCall: true,
        };
        // Give the onMutationStart hook a chance to edit request options
        // or cancel the call with a substitute result.
        if (this.hooks.onMutationStart) {
          const mutationCancellation: HookedCancellation = { cancelled: false };
          await this.hooks.onMutationStart({
            ...baseHookArguments,
            cancel: (res) => {
              mutationCancellation.cancelled = true;
              mutationCancellation.res = res;
            },
            editOptions: (options) => {
              Object.entries(options).forEach(([key, val]) => {
                // @ts-expect-error Index with key type is fine
                request[key] = val;
              });
            },
          });
          if (mutationCancellation.cancelled) {
            // Hook cancelled the mutation; return its supplied result instead.
            return mutationCancellation.res;
          }
        }
        try {
          // @ts-expect-error Response is an array type
          const [response] = await service.mutateCustomerNegativeCriteria(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          // Let the onMutationEnd hook inspect the decoded response and
          // optionally substitute its own resolution.
          if (this.hooks.onMutationEnd) {
            const mutationResolution: HookedResolution = { resolved: false };
            await this.hooks.onMutationEnd({
              ...baseHookArguments,
              response: this.decodePartialFailureError(response),
              resolve: (res) => {
                mutationResolution.resolved = true;
                mutationResolution.res = res;
              },
            });
            if (mutationResolution.resolved) {
              return mutationResolution.res;
            }
          }
          // Decode any partial_failure error embedded in the response.
          return this.decodePartialFailureError(response);
        } catch (err) {
          const googleAdsError = this.getGoogleAdsError(err);
          // Report the normalized error to the onMutationError hook, then rethrow.
          if (this.hooks.onMutationError) {
            await this.hooks.onMutationError({
              ...baseHookArguments,
              error: googleAdsError,
            });
          }
          throw googleAdsError;
        }
      }
      ,
      /**
       * @description remove resources of type string
       * @returns services.MutateCustomerNegativeCriteriaResponse
       */
      remove: async (
        customerNegativeCriteria: string[] ,
        options?: MutateOptions
      ): Promise<services.MutateCustomerNegativeCriteriaResponse > => {
        // Wrap each resource name in a "remove" operation.
        const ops = this.buildOperations<
          services.CustomerNegativeCriterionOperation,
          string
        >(
          "remove",
          customerNegativeCriteria
        );
        const request = this.buildRequest<
          services.CustomerNegativeCriterionOperation,
          services.IMutateCustomerNegativeCriteriaRequest,
          MutateOptions
        >(ops, options);
        const baseHookArguments: BaseMutationHookArgs = {
          credentials: this.credentials,
          method: "CustomerNegativeCriterionService.mutateCustomerNegativeCriteria",
          mutation: request,
          isServiceCall: true,
        };
        // Same hook flow as create: allow edit of options or cancellation.
        if (this.hooks.onMutationStart) {
          const mutationCancellation: HookedCancellation = { cancelled: false };
          await this.hooks.onMutationStart({
            ...baseHookArguments,
            cancel: (res) => {
              mutationCancellation.cancelled = true;
              mutationCancellation.res = res;
            },
            editOptions: (options) => {
              Object.entries(options).forEach(([key, val]) => {
                // @ts-expect-error Index with key type is fine
                request[key] = val;
              });
            },
          });
          if (mutationCancellation.cancelled) {
            return mutationCancellation.res;
          }
        }
        try {
          // @ts-expect-error Response is an array type
          const [response] = await service.mutateCustomerNegativeCriteria(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          if (this.hooks.onMutationEnd) {
            const mutationResolution: HookedResolution = { resolved: false };
            await this.hooks.onMutationEnd({
              ...baseHookArguments,
              response: this.decodePartialFailureError(response),
              resolve: (res) => {
                mutationResolution.resolved = true;
                mutationResolution.res = res;
              },
            });
            if (mutationResolution.resolved) {
              return mutationResolution.res;
            }
          }
          return this.decodePartialFailureError(response);
        } catch (err) {
          const googleAdsError = this.getGoogleAdsError(err);
          if (this.hooks.onMutationError) {
            await this.hooks.onMutationError({
              ...baseHookArguments,
              error: googleAdsError,
            });
          }
          throw googleAdsError;
        }
      }
    }
  }
  /**
   * Accessor for CustomerService.
   *
   * Returns an object exposing `get` and `update` operations for Customer
   * resources, plus the service-specific `listAccessibleCustomers` and
   * `createCustomerClient` RPCs.
   *
   * Unlike the batch mutate services, IMutateCustomerRequest has no
   * partial_failure option, so the response is returned as-is (no
   * decodePartialFailureError step).
   *
   * @link https://developers.google.com/google-ads/api/reference/rpc/v9/CustomerService
   */
  public get customers() {
    // Lazily load the gRPC client for this service.
    const service = this.loadService<services.CustomerService>("CustomerServiceClient")
    // Options callers may set on mutate requests for this service.
    type MutateOptions = Partial<Pick<services.IMutateCustomerRequest, "validate_only"|"response_content_type">>
    return {
      /**
       * @description Retrieve a resources.Customer in full detail
       * @warning Don't use get in production!
       * @returns resources.Customer
       */
      get: async (resourceName: string): Promise<resources.Customer> => {
        const request = new services.GetCustomerRequest({
          resource_name: resourceName,
        });
        try {
          // @ts-expect-error Response is an array type
          const [response] = await service.getCustomer(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          return response;
        } catch (err) {
          // Normalize the raw gRPC error into a GoogleAdsError before rethrowing.
          throw this.getGoogleAdsError(err);
        }
      }
      ,
      /**
       * @description update resources of type resources.ICustomer
       * @returns services.MutateCustomerResponse
       */
      update: async (
        customers: (resources.ICustomer | resources.Customer)[] ,
        options?: MutateOptions
      ): Promise<services.MutateCustomerResponse > => {
        // Wrap each resource in an "update" operation; the resource class is
        // passed so an update_mask can be derived from the provided fields.
        const ops = this.buildOperations<
          services.CustomerOperation,
          resources.ICustomer
        >(
          "update",
          customers
          // @ts-expect-error Static class type here is fine
          , resources.Customer
        );
        // Assemble the mutate request, merging in any caller-supplied options.
        const request = this.buildRequest<
          services.CustomerOperation,
          services.IMutateCustomerRequest,
          MutateOptions
        >(ops, options);
        const baseHookArguments: BaseMutationHookArgs = {
          credentials: this.credentials,
          method: "CustomerService.mutateCustomer",
          mutation: request,
          isServiceCall: true,
        };
        // Give the onMutationStart hook a chance to edit request options
        // or cancel the call with a substitute result.
        if (this.hooks.onMutationStart) {
          const mutationCancellation: HookedCancellation = { cancelled: false };
          await this.hooks.onMutationStart({
            ...baseHookArguments,
            cancel: (res) => {
              mutationCancellation.cancelled = true;
              mutationCancellation.res = res;
            },
            editOptions: (options) => {
              Object.entries(options).forEach(([key, val]) => {
                // @ts-expect-error Index with key type is fine
                request[key] = val;
              });
            },
          });
          if (mutationCancellation.cancelled) {
            // Hook cancelled the mutation; return its supplied result instead.
            return mutationCancellation.res;
          }
        }
        try {
          // @ts-expect-error Response is an array type
          const [response] = await service.mutateCustomer(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          // No partial-failure decoding here: this request type has no
          // partial_failure option, so the raw response is passed to the hook.
          if (this.hooks.onMutationEnd) {
            const mutationResolution: HookedResolution = { resolved: false };
            await this.hooks.onMutationEnd({
              ...baseHookArguments,
              response,
              resolve: (res) => {
                mutationResolution.resolved = true;
                mutationResolution.res = res;
              },
            });
            if (mutationResolution.resolved) {
              return mutationResolution.res;
            }
          }
          return response;
        } catch (err) {
          const googleAdsError = this.getGoogleAdsError(err);
          // Report the normalized error to the onMutationError hook, then rethrow.
          if (this.hooks.onMutationError) {
            await this.hooks.onMutationError({
              ...baseHookArguments,
              error: googleAdsError,
            });
          }
          throw googleAdsError;
        }
      }
      ,
      /**
       * Lists the customers directly accessible by the authenticated user.
       * Plain RPC passthrough: no mutation hooks are involved.
       *
       * @link https://developers.google.com/google-ads/api/reference/rpc/v9/CustomerService#listaccessiblecustomers
       */
      listAccessibleCustomers: async (request: services.ListAccessibleCustomersRequest): Promise<services.ListAccessibleCustomersResponse> => {
        try {
          // @ts-expect-error Response is an array type
          const [response] = await service.listAccessibleCustomers(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          return response;
        } catch (err) {
          throw this.getGoogleAdsError(err);
        }
      }
      ,
      /**
       * Creates a new client account under the manager account.
       * Plain RPC passthrough: no mutation hooks are involved.
       *
       * @link https://developers.google.com/google-ads/api/reference/rpc/v9/CustomerService#createcustomerclient
       */
      createCustomerClient: async (request: services.CreateCustomerClientRequest): Promise<services.CreateCustomerClientResponse> => {
        try {
          // @ts-expect-error Response is an array type
          const [response] = await service.createCustomerClient(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          return response;
        } catch (err) {
          throw this.getGoogleAdsError(err);
        }
      }
    }
  }
  /**
   * Accessor for CustomizerAttributeService.
   *
   * Returns an object exposing `create` and `remove` operations for
   * CustomizerAttribute resources (no `get` is generated for this service),
   * with full mutation-hook support.
   *
   * @link https://developers.google.com/google-ads/api/reference/rpc/v9/CustomizerAttributeService
   */
  public get customizerAttributes() {
    // Lazily load the gRPC client for this service.
    const service = this.loadService<services.CustomizerAttributeService>("CustomizerAttributeServiceClient")
    // Options callers may set on mutate requests for this service.
    type MutateOptions = Partial<Pick<services.IMutateCustomizerAttributesRequest, "partial_failure"|"validate_only"|"response_content_type">>
    return {
      /**
       * @description create resources of type resources.ICustomizerAttribute
       * @returns services.MutateCustomizerAttributesResponse
       */
      create: async (
        customizerAttributes: (resources.ICustomizerAttribute | resources.CustomizerAttribute)[] ,
        options?: MutateOptions
      ): Promise<services.MutateCustomizerAttributesResponse > => {
        // Wrap each resource in a "create" operation.
        const ops = this.buildOperations<
          services.CustomizerAttributeOperation,
          resources.ICustomizerAttribute
        >(
          "create",
          customizerAttributes
        );
        // Assemble the mutate request, merging in any caller-supplied options.
        const request = this.buildRequest<
          services.CustomizerAttributeOperation,
          services.IMutateCustomizerAttributesRequest,
          MutateOptions
        >(ops, options);
        const baseHookArguments: BaseMutationHookArgs = {
          credentials: this.credentials,
          method: "CustomizerAttributeService.mutateCustomizerAttributes",
          mutation: request,
          isServiceCall: true,
        };
        // Give the onMutationStart hook a chance to edit request options
        // or cancel the call with a substitute result.
        if (this.hooks.onMutationStart) {
          const mutationCancellation: HookedCancellation = { cancelled: false };
          await this.hooks.onMutationStart({
            ...baseHookArguments,
            cancel: (res) => {
              mutationCancellation.cancelled = true;
              mutationCancellation.res = res;
            },
            editOptions: (options) => {
              Object.entries(options).forEach(([key, val]) => {
                // @ts-expect-error Index with key type is fine
                request[key] = val;
              });
            },
          });
          if (mutationCancellation.cancelled) {
            // Hook cancelled the mutation; return its supplied result instead.
            return mutationCancellation.res;
          }
        }
        try {
          // @ts-expect-error Response is an array type
          const [response] = await service.mutateCustomizerAttributes(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          // Let the onMutationEnd hook inspect the decoded response and
          // optionally substitute its own resolution.
          if (this.hooks.onMutationEnd) {
            const mutationResolution: HookedResolution = { resolved: false };
            await this.hooks.onMutationEnd({
              ...baseHookArguments,
              response: this.decodePartialFailureError(response),
              resolve: (res) => {
                mutationResolution.resolved = true;
                mutationResolution.res = res;
              },
            });
            if (mutationResolution.resolved) {
              return mutationResolution.res;
            }
          }
          // Decode any partial_failure error embedded in the response.
          return this.decodePartialFailureError(response);
        } catch (err) {
          const googleAdsError = this.getGoogleAdsError(err);
          // Report the normalized error to the onMutationError hook, then rethrow.
          if (this.hooks.onMutationError) {
            await this.hooks.onMutationError({
              ...baseHookArguments,
              error: googleAdsError,
            });
          }
          throw googleAdsError;
        }
      }
      ,
      /**
       * @description remove resources of type string
       * @returns services.MutateCustomizerAttributesResponse
       */
      remove: async (
        customizerAttributes: string[] ,
        options?: MutateOptions
      ): Promise<services.MutateCustomizerAttributesResponse > => {
        // Wrap each resource name in a "remove" operation.
        const ops = this.buildOperations<
          services.CustomizerAttributeOperation,
          string
        >(
          "remove",
          customizerAttributes
        );
        const request = this.buildRequest<
          services.CustomizerAttributeOperation,
          services.IMutateCustomizerAttributesRequest,
          MutateOptions
        >(ops, options);
        const baseHookArguments: BaseMutationHookArgs = {
          credentials: this.credentials,
          method: "CustomizerAttributeService.mutateCustomizerAttributes",
          mutation: request,
          isServiceCall: true,
        };
        // Same hook flow as create: allow edit of options or cancellation.
        if (this.hooks.onMutationStart) {
          const mutationCancellation: HookedCancellation = { cancelled: false };
          await this.hooks.onMutationStart({
            ...baseHookArguments,
            cancel: (res) => {
              mutationCancellation.cancelled = true;
              mutationCancellation.res = res;
            },
            editOptions: (options) => {
              Object.entries(options).forEach(([key, val]) => {
                // @ts-expect-error Index with key type is fine
                request[key] = val;
              });
            },
          });
          if (mutationCancellation.cancelled) {
            return mutationCancellation.res;
          }
        }
        try {
          // @ts-expect-error Response is an array type
          const [response] = await service.mutateCustomizerAttributes(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          if (this.hooks.onMutationEnd) {
            const mutationResolution: HookedResolution = { resolved: false };
            await this.hooks.onMutationEnd({
              ...baseHookArguments,
              response: this.decodePartialFailureError(response),
              resolve: (res) => {
                mutationResolution.resolved = true;
                mutationResolution.res = res;
              },
            });
            if (mutationResolution.resolved) {
              return mutationResolution.res;
            }
          }
          return this.decodePartialFailureError(response);
        } catch (err) {
          const googleAdsError = this.getGoogleAdsError(err);
          if (this.hooks.onMutationError) {
            await this.hooks.onMutationError({
              ...baseHookArguments,
              error: googleAdsError,
            });
          }
          throw googleAdsError;
        }
      }
    }
  }
  /**
   * Accessor for ExtensionFeedItemService.
   *
   * Returns an object exposing `get`, `create`, `update` and `remove`
   * operations for ExtensionFeedItem resources, with full mutation-hook
   * support.
   *
   * @link https://developers.google.com/google-ads/api/reference/rpc/v9/ExtensionFeedItemService
   */
  public get extensionFeedItems() {
    // Lazily load the gRPC client for this service.
    const service = this.loadService<services.ExtensionFeedItemService>("ExtensionFeedItemServiceClient")
    // Options callers may set on mutate requests for this service.
    type MutateOptions = Partial<Pick<services.IMutateExtensionFeedItemsRequest, "partial_failure"|"validate_only"|"response_content_type">>
    return {
      /**
       * @description Retrieve a resources.ExtensionFeedItem in full detail
       * @warning Don't use get in production!
       * @returns resources.ExtensionFeedItem
       */
      get: async (resourceName: string): Promise<resources.ExtensionFeedItem> => {
        const request = new services.GetExtensionFeedItemRequest({
          resource_name: resourceName,
        });
        try {
          // @ts-expect-error Response is an array type
          const [response] = await service.getExtensionFeedItem(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          return response;
        } catch (err) {
          // Normalize the raw gRPC error into a GoogleAdsError before rethrowing.
          throw this.getGoogleAdsError(err);
        }
      }
      ,
      /**
       * @description create resources of type resources.IExtensionFeedItem
       * @returns services.MutateExtensionFeedItemsResponse
       */
      create: async (
        extensionFeedItems: (resources.IExtensionFeedItem | resources.ExtensionFeedItem)[] ,
        options?: MutateOptions
      ): Promise<services.MutateExtensionFeedItemsResponse > => {
        // Wrap each resource in a "create" operation.
        const ops = this.buildOperations<
          services.ExtensionFeedItemOperation,
          resources.IExtensionFeedItem
        >(
          "create",
          extensionFeedItems
        );
        // Assemble the mutate request, merging in any caller-supplied options.
        const request = this.buildRequest<
          services.ExtensionFeedItemOperation,
          services.IMutateExtensionFeedItemsRequest,
          MutateOptions
        >(ops, options);
        const baseHookArguments: BaseMutationHookArgs = {
          credentials: this.credentials,
          method: "ExtensionFeedItemService.mutateExtensionFeedItems",
          mutation: request,
          isServiceCall: true,
        };
        // Give the onMutationStart hook a chance to edit request options
        // or cancel the call with a substitute result.
        if (this.hooks.onMutationStart) {
          const mutationCancellation: HookedCancellation = { cancelled: false };
          await this.hooks.onMutationStart({
            ...baseHookArguments,
            cancel: (res) => {
              mutationCancellation.cancelled = true;
              mutationCancellation.res = res;
            },
            editOptions: (options) => {
              Object.entries(options).forEach(([key, val]) => {
                // @ts-expect-error Index with key type is fine
                request[key] = val;
              });
            },
          });
          if (mutationCancellation.cancelled) {
            // Hook cancelled the mutation; return its supplied result instead.
            return mutationCancellation.res;
          }
        }
        try {
          // @ts-expect-error Response is an array type
          const [response] = await service.mutateExtensionFeedItems(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          // Let the onMutationEnd hook inspect the decoded response and
          // optionally substitute its own resolution.
          if (this.hooks.onMutationEnd) {
            const mutationResolution: HookedResolution = { resolved: false };
            await this.hooks.onMutationEnd({
              ...baseHookArguments,
              response: this.decodePartialFailureError(response),
              resolve: (res) => {
                mutationResolution.resolved = true;
                mutationResolution.res = res;
              },
            });
            if (mutationResolution.resolved) {
              return mutationResolution.res;
            }
          }
          // Decode any partial_failure error embedded in the response.
          return this.decodePartialFailureError(response);
        } catch (err) {
          const googleAdsError = this.getGoogleAdsError(err);
          // Report the normalized error to the onMutationError hook, then rethrow.
          if (this.hooks.onMutationError) {
            await this.hooks.onMutationError({
              ...baseHookArguments,
              error: googleAdsError,
            });
          }
          throw googleAdsError;
        }
      }
      ,
      /**
       * @description update resources of type resources.IExtensionFeedItem
       * @returns services.MutateExtensionFeedItemsResponse
       */
      update: async (
        extensionFeedItems: (resources.IExtensionFeedItem | resources.ExtensionFeedItem)[] ,
        options?: MutateOptions
      ): Promise<services.MutateExtensionFeedItemsResponse > => {
        // Wrap each resource in an "update" operation; the resource class is
        // passed so an update_mask can be derived from the provided fields.
        const ops = this.buildOperations<
          services.ExtensionFeedItemOperation,
          resources.IExtensionFeedItem
        >(
          "update",
          extensionFeedItems
          // @ts-expect-error Static class type here is fine
          , resources.ExtensionFeedItem
        );
        const request = this.buildRequest<
          services.ExtensionFeedItemOperation,
          services.IMutateExtensionFeedItemsRequest,
          MutateOptions
        >(ops, options);
        const baseHookArguments: BaseMutationHookArgs = {
          credentials: this.credentials,
          method: "ExtensionFeedItemService.mutateExtensionFeedItems",
          mutation: request,
          isServiceCall: true,
        };
        // Same hook flow as create: allow edit of options or cancellation.
        if (this.hooks.onMutationStart) {
          const mutationCancellation: HookedCancellation = { cancelled: false };
          await this.hooks.onMutationStart({
            ...baseHookArguments,
            cancel: (res) => {
              mutationCancellation.cancelled = true;
              mutationCancellation.res = res;
            },
            editOptions: (options) => {
              Object.entries(options).forEach(([key, val]) => {
                // @ts-expect-error Index with key type is fine
                request[key] = val;
              });
            },
          });
          if (mutationCancellation.cancelled) {
            return mutationCancellation.res;
          }
        }
        try {
          // @ts-expect-error Response is an array type
          const [response] = await service.mutateExtensionFeedItems(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          if (this.hooks.onMutationEnd) {
            const mutationResolution: HookedResolution = { resolved: false };
            await this.hooks.onMutationEnd({
              ...baseHookArguments,
              response: this.decodePartialFailureError(response),
              resolve: (res) => {
                mutationResolution.resolved = true;
                mutationResolution.res = res;
              },
            });
            if (mutationResolution.resolved) {
              return mutationResolution.res;
            }
          }
          return this.decodePartialFailureError(response);
        } catch (err) {
          const googleAdsError = this.getGoogleAdsError(err);
          if (this.hooks.onMutationError) {
            await this.hooks.onMutationError({
              ...baseHookArguments,
              error: googleAdsError,
            });
          }
          throw googleAdsError;
        }
      }
      ,
      /**
       * @description remove resources of type string
       * @returns services.MutateExtensionFeedItemsResponse
       */
      remove: async (
        extensionFeedItems: string[] ,
        options?: MutateOptions
      ): Promise<services.MutateExtensionFeedItemsResponse > => {
        // Wrap each resource name in a "remove" operation.
        const ops = this.buildOperations<
          services.ExtensionFeedItemOperation,
          string
        >(
          "remove",
          extensionFeedItems
        );
        const request = this.buildRequest<
          services.ExtensionFeedItemOperation,
          services.IMutateExtensionFeedItemsRequest,
          MutateOptions
        >(ops, options);
        const baseHookArguments: BaseMutationHookArgs = {
          credentials: this.credentials,
          method: "ExtensionFeedItemService.mutateExtensionFeedItems",
          mutation: request,
          isServiceCall: true,
        };
        // Same hook flow as create: allow edit of options or cancellation.
        if (this.hooks.onMutationStart) {
          const mutationCancellation: HookedCancellation = { cancelled: false };
          await this.hooks.onMutationStart({
            ...baseHookArguments,
            cancel: (res) => {
              mutationCancellation.cancelled = true;
              mutationCancellation.res = res;
            },
            editOptions: (options) => {
              Object.entries(options).forEach(([key, val]) => {
                // @ts-expect-error Index with key type is fine
                request[key] = val;
              });
            },
          });
          if (mutationCancellation.cancelled) {
            return mutationCancellation.res;
          }
        }
        try {
          // @ts-expect-error Response is an array type
          const [response] = await service.mutateExtensionFeedItems(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          if (this.hooks.onMutationEnd) {
            const mutationResolution: HookedResolution = { resolved: false };
            await this.hooks.onMutationEnd({
              ...baseHookArguments,
              response: this.decodePartialFailureError(response),
              resolve: (res) => {
                mutationResolution.resolved = true;
                mutationResolution.res = res;
              },
            });
            if (mutationResolution.resolved) {
              return mutationResolution.res;
            }
          }
          return this.decodePartialFailureError(response);
        } catch (err) {
          const googleAdsError = this.getGoogleAdsError(err);
          if (this.hooks.onMutationError) {
            await this.hooks.onMutationError({
              ...baseHookArguments,
              error: googleAdsError,
            });
          }
          throw googleAdsError;
        }
      }
    }
  }
  /**
   * Accessor for FeedItemService.
   *
   * Returns an object exposing `get`, `create`, `update` and `remove`
   * operations for FeedItem resources, with full mutation-hook support.
   *
   * @link https://developers.google.com/google-ads/api/reference/rpc/v9/FeedItemService
   */
  public get feedItems() {
    // Lazily load the gRPC client for this service.
    const service = this.loadService<services.FeedItemService>("FeedItemServiceClient")
    // Options callers may set on mutate requests for this service.
    type MutateOptions = Partial<Pick<services.IMutateFeedItemsRequest, "partial_failure"|"validate_only"|"response_content_type">>
    return {
      /**
       * @description Retrieve a resources.FeedItem in full detail
       * @warning Don't use get in production!
       * @returns resources.FeedItem
       */
      get: async (resourceName: string): Promise<resources.FeedItem> => {
        const request = new services.GetFeedItemRequest({
          resource_name: resourceName,
        });
        try {
          // @ts-expect-error Response is an array type
          const [response] = await service.getFeedItem(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          return response;
        } catch (err) {
          // Normalize the raw gRPC error into a GoogleAdsError before rethrowing.
          throw this.getGoogleAdsError(err);
        }
      }
      ,
      /**
       * @description create resources of type resources.IFeedItem
       * @returns services.MutateFeedItemsResponse
       */
      create: async (
        feedItems: (resources.IFeedItem | resources.FeedItem)[] ,
        options?: MutateOptions
      ): Promise<services.MutateFeedItemsResponse > => {
        // Wrap each resource in a "create" operation.
        const ops = this.buildOperations<
          services.FeedItemOperation,
          resources.IFeedItem
        >(
          "create",
          feedItems
        );
        // Assemble the mutate request, merging in any caller-supplied options.
        const request = this.buildRequest<
          services.FeedItemOperation,
          services.IMutateFeedItemsRequest,
          MutateOptions
        >(ops, options);
        const baseHookArguments: BaseMutationHookArgs = {
          credentials: this.credentials,
          method: "FeedItemService.mutateFeedItems",
          mutation: request,
          isServiceCall: true,
        };
        // Give the onMutationStart hook a chance to edit request options
        // or cancel the call with a substitute result.
        if (this.hooks.onMutationStart) {
          const mutationCancellation: HookedCancellation = { cancelled: false };
          await this.hooks.onMutationStart({
            ...baseHookArguments,
            cancel: (res) => {
              mutationCancellation.cancelled = true;
              mutationCancellation.res = res;
            },
            editOptions: (options) => {
              Object.entries(options).forEach(([key, val]) => {
                // @ts-expect-error Index with key type is fine
                request[key] = val;
              });
            },
          });
          if (mutationCancellation.cancelled) {
            // Hook cancelled the mutation; return its supplied result instead.
            return mutationCancellation.res;
          }
        }
        try {
          // @ts-expect-error Response is an array type
          const [response] = await service.mutateFeedItems(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          // Let the onMutationEnd hook inspect the decoded response and
          // optionally substitute its own resolution.
          if (this.hooks.onMutationEnd) {
            const mutationResolution: HookedResolution = { resolved: false };
            await this.hooks.onMutationEnd({
              ...baseHookArguments,
              response: this.decodePartialFailureError(response),
              resolve: (res) => {
                mutationResolution.resolved = true;
                mutationResolution.res = res;
              },
            });
            if (mutationResolution.resolved) {
              return mutationResolution.res;
            }
          }
          // Decode any partial_failure error embedded in the response.
          return this.decodePartialFailureError(response);
        } catch (err) {
          const googleAdsError = this.getGoogleAdsError(err);
          // Report the normalized error to the onMutationError hook, then rethrow.
          if (this.hooks.onMutationError) {
            await this.hooks.onMutationError({
              ...baseHookArguments,
              error: googleAdsError,
            });
          }
          throw googleAdsError;
        }
      }
      ,
      /**
       * @description update resources of type resources.IFeedItem
       * @returns services.MutateFeedItemsResponse
       */
      update: async (
        feedItems: (resources.IFeedItem | resources.FeedItem)[] ,
        options?: MutateOptions
      ): Promise<services.MutateFeedItemsResponse > => {
        // Wrap each resource in an "update" operation; the resource class is
        // passed so an update_mask can be derived from the provided fields.
        const ops = this.buildOperations<
          services.FeedItemOperation,
          resources.IFeedItem
        >(
          "update",
          feedItems
          // @ts-expect-error Static class type here is fine
          , resources.FeedItem
        );
        const request = this.buildRequest<
          services.FeedItemOperation,
          services.IMutateFeedItemsRequest,
          MutateOptions
        >(ops, options);
        const baseHookArguments: BaseMutationHookArgs = {
          credentials: this.credentials,
          method: "FeedItemService.mutateFeedItems",
          mutation: request,
          isServiceCall: true,
        };
        // Same hook flow as create: allow edit of options or cancellation.
        if (this.hooks.onMutationStart) {
          const mutationCancellation: HookedCancellation = { cancelled: false };
          await this.hooks.onMutationStart({
            ...baseHookArguments,
            cancel: (res) => {
              mutationCancellation.cancelled = true;
              mutationCancellation.res = res;
            },
            editOptions: (options) => {
              Object.entries(options).forEach(([key, val]) => {
                // @ts-expect-error Index with key type is fine
                request[key] = val;
              });
            },
          });
          if (mutationCancellation.cancelled) {
            return mutationCancellation.res;
          }
        }
        try {
          // @ts-expect-error Response is an array type
          const [response] = await service.mutateFeedItems(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          if (this.hooks.onMutationEnd) {
            const mutationResolution: HookedResolution = { resolved: false };
            await this.hooks.onMutationEnd({
              ...baseHookArguments,
              response: this.decodePartialFailureError(response),
              resolve: (res) => {
                mutationResolution.resolved = true;
                mutationResolution.res = res;
              },
            });
            if (mutationResolution.resolved) {
              return mutationResolution.res;
            }
          }
          return this.decodePartialFailureError(response);
        } catch (err) {
          const googleAdsError = this.getGoogleAdsError(err);
          if (this.hooks.onMutationError) {
            await this.hooks.onMutationError({
              ...baseHookArguments,
              error: googleAdsError,
            });
          }
          throw googleAdsError;
        }
      }
      ,
      /**
       * @description remove resources of type string
       * @returns services.MutateFeedItemsResponse
       */
      remove: async (
        feedItems: string[] ,
        options?: MutateOptions
      ): Promise<services.MutateFeedItemsResponse > => {
        // Wrap each resource name in a "remove" operation.
        const ops = this.buildOperations<
          services.FeedItemOperation,
          string
        >(
          "remove",
          feedItems
        );
        const request = this.buildRequest<
          services.FeedItemOperation,
          services.IMutateFeedItemsRequest,
          MutateOptions
        >(ops, options);
        const baseHookArguments: BaseMutationHookArgs = {
          credentials: this.credentials,
          method: "FeedItemService.mutateFeedItems",
          mutation: request,
          isServiceCall: true,
        };
        // Same hook flow as create: allow edit of options or cancellation.
        if (this.hooks.onMutationStart) {
          const mutationCancellation: HookedCancellation = { cancelled: false };
          await this.hooks.onMutationStart({
            ...baseHookArguments,
            cancel: (res) => {
              mutationCancellation.cancelled = true;
              mutationCancellation.res = res;
            },
            editOptions: (options) => {
              Object.entries(options).forEach(([key, val]) => {
                // @ts-expect-error Index with key type is fine
                request[key] = val;
              });
            },
          });
          if (mutationCancellation.cancelled) {
            return mutationCancellation.res;
          }
        }
        try {
          // @ts-expect-error Response is an array type
          const [response] = await service.mutateFeedItems(request, {
            // @ts-expect-error This arg doesn't exist in the type definitions
            otherArgs: {
              headers: this.callHeaders,
            },
          });
          if (this.hooks.onMutationEnd) {
            const mutationResolution: HookedResolution = { resolved: false };
            await this.hooks.onMutationEnd({
              ...baseHookArguments,
              response: this.decodePartialFailureError(response),
              resolve: (res) => {
                mutationResolution.resolved = true;
                mutationResolution.res = res;
              },
            });
            if (mutationResolution.resolved) {
              return mutationResolution.res;
            }
          }
          return this.decodePartialFailureError(response);
        } catch (err) {
          const googleAdsError = this.getGoogleAdsError(err);
          if (this.hooks.onMutationError) {
            await this.hooks.onMutationError({
              ...baseHookArguments,
              error: googleAdsError,
            });
          }
          throw googleAdsError;
        }
      }
    }
  }
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/FeedItemSetLinkService
*/
public get feedItemSetLinks() {
// Load the FeedItemSetLinkService gRPC client shared by the methods returned below.
const service = this.loadService<services.FeedItemSetLinkService>("FeedItemSetLinkServiceClient")
// Request-level flags callers may pass to the mutate methods below.
type MutateOptions = Partial<Pick<services.IMutateFeedItemSetLinksRequest, "partial_failure"|"validate_only">>
return {
/**
* @description Retrieve a resources.FeedItemSetLink in full detail
* @warning Don't use get in production!
* @returns resources.FeedItemSetLink
*/
get: async (resourceName: string): Promise<resources.FeedItemSetLink> => {
const request = new services.GetFeedItemSetLinkRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getFeedItemSetLink(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise transport/API errors via getGoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.IFeedItemSetLink
* @returns services.MutateFeedItemSetLinksResponse
*/
create: async (
feedItemSetLinks: (resources.IFeedItemSetLink | resources.FeedItemSetLink)[] ,
options?: MutateOptions
): Promise<services.MutateFeedItemSetLinksResponse > => {
const ops = this.buildOperations<
services.FeedItemSetLinkOperation,
resources.IFeedItemSetLink
>(
"create",
feedItemSetLinks
);
const request = this.buildRequest<
services.FeedItemSetLinkOperation,
services.IMutateFeedItemSetLinksRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "FeedItemSetLinkService.mutateFeedItemSetLinks",
mutation: request,
isServiceCall: true,
};
// Let the onMutationStart hook cancel this call or edit request options in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateFeedItemSetLinks(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// The onMutationEnd hook may substitute its own resolved response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode partial-failure details (if any) embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Wrap the raw error via getGoogleAdsError before notifying onMutationError.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateFeedItemSetLinksResponse
*/
remove: async (
feedItemSetLinks: string[] ,
options?: MutateOptions
): Promise<services.MutateFeedItemSetLinksResponse > => {
const ops = this.buildOperations<
services.FeedItemSetLinkOperation,
string
>(
"remove",
feedItemSetLinks
);
const request = this.buildRequest<
services.FeedItemSetLinkOperation,
services.IMutateFeedItemSetLinksRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "FeedItemSetLinkService.mutateFeedItemSetLinks",
mutation: request,
isServiceCall: true,
};
// Hook/cancellation/error flow mirrors create above.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateFeedItemSetLinks(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/FeedItemSetService
*/
public get feedItemSets() {
// Load the FeedItemSetService gRPC client shared by the methods returned below.
const service = this.loadService<services.FeedItemSetService>("FeedItemSetServiceClient")
// Request-level flags callers may pass to the mutate methods below.
type MutateOptions = Partial<Pick<services.IMutateFeedItemSetsRequest, "partial_failure"|"validate_only">>
return {
/**
* @description Retrieve a resources.FeedItemSet in full detail
* @warning Don't use get in production!
* @returns resources.FeedItemSet
*/
get: async (resourceName: string): Promise<resources.FeedItemSet> => {
const request = new services.GetFeedItemSetRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getFeedItemSet(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise transport/API errors via getGoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.IFeedItemSet
* @returns services.MutateFeedItemSetsResponse
*/
create: async (
feedItemSets: (resources.IFeedItemSet | resources.FeedItemSet)[] ,
options?: MutateOptions
): Promise<services.MutateFeedItemSetsResponse > => {
const ops = this.buildOperations<
services.FeedItemSetOperation,
resources.IFeedItemSet
>(
"create",
feedItemSets
);
const request = this.buildRequest<
services.FeedItemSetOperation,
services.IMutateFeedItemSetsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "FeedItemSetService.mutateFeedItemSets",
mutation: request,
isServiceCall: true,
};
// Let the onMutationStart hook cancel this call or edit request options in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateFeedItemSets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// The onMutationEnd hook may substitute its own resolved response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode partial-failure details (if any) embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Wrap the raw error via getGoogleAdsError before notifying onMutationError.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description update resources of type resources.IFeedItemSet
* @returns services.MutateFeedItemSetsResponse
*/
update: async (
feedItemSets: (resources.IFeedItemSet | resources.FeedItemSet)[] ,
options?: MutateOptions
): Promise<services.MutateFeedItemSetsResponse > => {
const ops = this.buildOperations<
services.FeedItemSetOperation,
resources.IFeedItemSet
>(
"update",
feedItemSets
// @ts-expect-error Static class type here is fine
, resources.FeedItemSet
);
const request = this.buildRequest<
services.FeedItemSetOperation,
services.IMutateFeedItemSetsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "FeedItemSetService.mutateFeedItemSets",
mutation: request,
isServiceCall: true,
};
// Hook/cancellation/error flow mirrors create above.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateFeedItemSets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateFeedItemSetsResponse
*/
remove: async (
feedItemSets: string[] ,
options?: MutateOptions
): Promise<services.MutateFeedItemSetsResponse > => {
const ops = this.buildOperations<
services.FeedItemSetOperation,
string
>(
"remove",
feedItemSets
);
const request = this.buildRequest<
services.FeedItemSetOperation,
services.IMutateFeedItemSetsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "FeedItemSetService.mutateFeedItemSets",
mutation: request,
isServiceCall: true,
};
// Hook/cancellation/error flow mirrors create above.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateFeedItemSets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/FeedItemTargetService
*/
public get feedItemTargets() {
// Load the FeedItemTargetService gRPC client shared by the methods returned below.
const service = this.loadService<services.FeedItemTargetService>("FeedItemTargetServiceClient")
// Request-level flags callers may pass to the mutate methods below.
type MutateOptions = Partial<Pick<services.IMutateFeedItemTargetsRequest, "partial_failure"|"response_content_type"|"validate_only">>
return {
/**
* @description Retrieve a resources.FeedItemTarget in full detail
* @warning Don't use get in production!
* @returns resources.FeedItemTarget
*/
get: async (resourceName: string): Promise<resources.FeedItemTarget> => {
const request = new services.GetFeedItemTargetRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getFeedItemTarget(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise transport/API errors via getGoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.IFeedItemTarget
* @returns services.MutateFeedItemTargetsResponse
*/
create: async (
feedItemTargets: (resources.IFeedItemTarget | resources.FeedItemTarget)[] ,
options?: MutateOptions
): Promise<services.MutateFeedItemTargetsResponse > => {
const ops = this.buildOperations<
services.FeedItemTargetOperation,
resources.IFeedItemTarget
>(
"create",
feedItemTargets
);
const request = this.buildRequest<
services.FeedItemTargetOperation,
services.IMutateFeedItemTargetsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "FeedItemTargetService.mutateFeedItemTargets",
mutation: request,
isServiceCall: true,
};
// Let the onMutationStart hook cancel this call or edit request options in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateFeedItemTargets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// The onMutationEnd hook may substitute its own resolved response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode partial-failure details (if any) embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Wrap the raw error via getGoogleAdsError before notifying onMutationError.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateFeedItemTargetsResponse
*/
remove: async (
feedItemTargets: string[] ,
options?: MutateOptions
): Promise<services.MutateFeedItemTargetsResponse > => {
const ops = this.buildOperations<
services.FeedItemTargetOperation,
string
>(
"remove",
feedItemTargets
);
const request = this.buildRequest<
services.FeedItemTargetOperation,
services.IMutateFeedItemTargetsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "FeedItemTargetService.mutateFeedItemTargets",
mutation: request,
isServiceCall: true,
};
// Hook/cancellation/error flow mirrors create above.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateFeedItemTargets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/FeedMappingService
*/
public get feedMappings() {
// Load the FeedMappingService gRPC client shared by the methods returned below.
const service = this.loadService<services.FeedMappingService>("FeedMappingServiceClient")
// Request-level flags callers may pass to the mutate methods below.
type MutateOptions = Partial<Pick<services.IMutateFeedMappingsRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
* @description Retrieve a resources.FeedMapping in full detail
* @warning Don't use get in production!
* @returns resources.FeedMapping
*/
get: async (resourceName: string): Promise<resources.FeedMapping> => {
const request = new services.GetFeedMappingRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getFeedMapping(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise transport/API errors via getGoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.IFeedMapping
* @returns services.MutateFeedMappingsResponse
*/
create: async (
feedMappings: (resources.IFeedMapping | resources.FeedMapping)[] ,
options?: MutateOptions
): Promise<services.MutateFeedMappingsResponse > => {
const ops = this.buildOperations<
services.FeedMappingOperation,
resources.IFeedMapping
>(
"create",
feedMappings
);
const request = this.buildRequest<
services.FeedMappingOperation,
services.IMutateFeedMappingsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "FeedMappingService.mutateFeedMappings",
mutation: request,
isServiceCall: true,
};
// Let the onMutationStart hook cancel this call or edit request options in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateFeedMappings(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// The onMutationEnd hook may substitute its own resolved response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode partial-failure details (if any) embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Wrap the raw error via getGoogleAdsError before notifying onMutationError.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateFeedMappingsResponse
*/
remove: async (
feedMappings: string[] ,
options?: MutateOptions
): Promise<services.MutateFeedMappingsResponse > => {
const ops = this.buildOperations<
services.FeedMappingOperation,
string
>(
"remove",
feedMappings
);
const request = this.buildRequest<
services.FeedMappingOperation,
services.IMutateFeedMappingsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "FeedMappingService.mutateFeedMappings",
mutation: request,
isServiceCall: true,
};
// Hook/cancellation/error flow mirrors create above.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateFeedMappings(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/FeedService
*/
public get feeds() {
// Load the FeedService gRPC client shared by the methods returned below.
const service = this.loadService<services.FeedService>("FeedServiceClient")
// Request-level flags callers may pass to the mutate methods below.
type MutateOptions = Partial<Pick<services.IMutateFeedsRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
* @description Retrieve a resources.Feed in full detail
* @warning Don't use get in production!
* @returns resources.Feed
*/
get: async (resourceName: string): Promise<resources.Feed> => {
const request = new services.GetFeedRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getFeed(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise transport/API errors via getGoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.IFeed
* @returns services.MutateFeedsResponse
*/
create: async (
feeds: (resources.IFeed | resources.Feed)[] ,
options?: MutateOptions
): Promise<services.MutateFeedsResponse > => {
const ops = this.buildOperations<
services.FeedOperation,
resources.IFeed
>(
"create",
feeds
);
const request = this.buildRequest<
services.FeedOperation,
services.IMutateFeedsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "FeedService.mutateFeeds",
mutation: request,
isServiceCall: true,
};
// Let the onMutationStart hook cancel this call or edit request options in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateFeeds(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// The onMutationEnd hook may substitute its own resolved response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
// Decode partial-failure details (if any) embedded in the response.
return this.decodePartialFailureError(response);
} catch (err) {
// Wrap the raw error via getGoogleAdsError before notifying onMutationError.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description update resources of type resources.IFeed
* @returns services.MutateFeedsResponse
*/
update: async (
feeds: (resources.IFeed | resources.Feed)[] ,
options?: MutateOptions
): Promise<services.MutateFeedsResponse > => {
const ops = this.buildOperations<
services.FeedOperation,
resources.IFeed
>(
"update",
feeds
// @ts-expect-error Static class type here is fine
, resources.Feed
);
const request = this.buildRequest<
services.FeedOperation,
services.IMutateFeedsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "FeedService.mutateFeeds",
mutation: request,
isServiceCall: true,
};
// Hook/cancellation/error flow mirrors create above.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateFeeds(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateFeedsResponse
*/
remove: async (
feeds: string[] ,
options?: MutateOptions
): Promise<services.MutateFeedsResponse > => {
const ops = this.buildOperations<
services.FeedOperation,
string
>(
"remove",
feeds
);
const request = this.buildRequest<
services.FeedOperation,
services.IMutateFeedsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "FeedService.mutateFeeds",
mutation: request,
isServiceCall: true,
};
// Hook/cancellation/error flow mirrors create above.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateFeeds(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
 * @description Accessor exposing get/create/update/remove operations for
 * resources.KeywordPlanAdGroupKeyword via KeywordPlanAdGroupKeywordService.
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/KeywordPlanAdGroupKeywordService
 */
public get keywordPlanAdGroupKeywords() {
// Load the generated gRPC client for this service.
const service = this.loadService<services.KeywordPlanAdGroupKeywordService>("KeywordPlanAdGroupKeywordServiceClient")
// Request-level flags callers may forward onto the mutate request.
type MutateOptions = Partial<Pick<services.IMutateKeywordPlanAdGroupKeywordsRequest, "partial_failure"|"validate_only">>
return {
/**
 * @description Retrieve a resources.KeywordPlanAdGroupKeyword in full detail
 * @warning Don't use get in production!
 * @returns resources.KeywordPlanAdGroupKeyword
 */
get: async (resourceName: string): Promise<resources.KeywordPlanAdGroupKeyword> => {
const request = new services.GetKeywordPlanAdGroupKeywordRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getKeywordPlanAdGroupKeyword(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalize the raw gRPC error before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
 * @description create resources of type resources.IKeywordPlanAdGroupKeyword
 * @returns services.MutateKeywordPlanAdGroupKeywordsResponse
 */
create: async (
keywordPlanAdGroupKeywords: (resources.IKeywordPlanAdGroupKeyword | resources.KeywordPlanAdGroupKeyword)[] ,
options?: MutateOptions
): Promise<services.MutateKeywordPlanAdGroupKeywordsResponse > => {
// Wrap each resource in a "create" operation.
const ops = this.buildOperations<
services.KeywordPlanAdGroupKeywordOperation,
resources.IKeywordPlanAdGroupKeyword
>(
"create",
keywordPlanAdGroupKeywords
);
// Assemble the mutate request from the operations plus caller options.
const request = this.buildRequest<
services.KeywordPlanAdGroupKeywordOperation,
services.IMutateKeywordPlanAdGroupKeywordsRequest,
MutateOptions
>(ops, options);
// Context shared by the onMutationStart/onMutationEnd/onMutationError hooks.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "KeywordPlanAdGroupKeywordService.mutateKeywordPlanAdGroupKeywords",
mutation: request,
isServiceCall: true,
};
// Let the onMutationStart hook cancel the call or edit request options.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
// Hook cancelled: return its substitute result without calling the API.
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateKeywordPlanAdGroupKeywords(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Let the onMutationEnd hook observe (and optionally replace) the response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the error, notify the onMutationError hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description update resources of type resources.IKeywordPlanAdGroupKeyword
 * @returns services.MutateKeywordPlanAdGroupKeywordsResponse
 */
update: async (
keywordPlanAdGroupKeywords: (resources.IKeywordPlanAdGroupKeyword | resources.KeywordPlanAdGroupKeyword)[] ,
options?: MutateOptions
): Promise<services.MutateKeywordPlanAdGroupKeywordsResponse > => {
// Wrap each resource in an "update" operation. The static resource class is
// also passed — presumably used by buildOperations to derive the update mask;
// confirm against buildOperations.
const ops = this.buildOperations<
services.KeywordPlanAdGroupKeywordOperation,
resources.IKeywordPlanAdGroupKeyword
>(
"update",
keywordPlanAdGroupKeywords
// @ts-expect-error Static class type here is fine
, resources.KeywordPlanAdGroupKeyword
);
const request = this.buildRequest<
services.KeywordPlanAdGroupKeywordOperation,
services.IMutateKeywordPlanAdGroupKeywordsRequest,
MutateOptions
>(ops, options);
// Context shared by the onMutationStart/onMutationEnd/onMutationError hooks.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "KeywordPlanAdGroupKeywordService.mutateKeywordPlanAdGroupKeywords",
mutation: request,
isServiceCall: true,
};
// Let the onMutationStart hook cancel the call or edit request options.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
// Hook cancelled: return its substitute result without calling the API.
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateKeywordPlanAdGroupKeywords(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Let the onMutationEnd hook observe (and optionally replace) the response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the error, notify the onMutationError hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description remove resources of type string
 * @returns services.MutateKeywordPlanAdGroupKeywordsResponse
 */
remove: async (
keywordPlanAdGroupKeywords: string[] ,
options?: MutateOptions
): Promise<services.MutateKeywordPlanAdGroupKeywordsResponse > => {
// Wrap each resource name in a "remove" operation.
const ops = this.buildOperations<
services.KeywordPlanAdGroupKeywordOperation,
string
>(
"remove",
keywordPlanAdGroupKeywords
);
const request = this.buildRequest<
services.KeywordPlanAdGroupKeywordOperation,
services.IMutateKeywordPlanAdGroupKeywordsRequest,
MutateOptions
>(ops, options);
// Context shared by the onMutationStart/onMutationEnd/onMutationError hooks.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "KeywordPlanAdGroupKeywordService.mutateKeywordPlanAdGroupKeywords",
mutation: request,
isServiceCall: true,
};
// Let the onMutationStart hook cancel the call or edit request options.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
// Hook cancelled: return its substitute result without calling the API.
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateKeywordPlanAdGroupKeywords(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Let the onMutationEnd hook observe (and optionally replace) the response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the error, notify the onMutationError hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
 * @description Accessor exposing get/create/update/remove operations for
 * resources.KeywordPlanAdGroup via KeywordPlanAdGroupService.
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/KeywordPlanAdGroupService
 */
public get keywordPlanAdGroups() {
// Load the generated gRPC client for this service.
const service = this.loadService<services.KeywordPlanAdGroupService>("KeywordPlanAdGroupServiceClient")
// Request-level flags callers may forward onto the mutate request.
type MutateOptions = Partial<Pick<services.IMutateKeywordPlanAdGroupsRequest, "partial_failure"|"validate_only">>
return {
/**
 * @description Retrieve a resources.KeywordPlanAdGroup in full detail
 * @warning Don't use get in production!
 * @returns resources.KeywordPlanAdGroup
 */
get: async (resourceName: string): Promise<resources.KeywordPlanAdGroup> => {
const request = new services.GetKeywordPlanAdGroupRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getKeywordPlanAdGroup(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalize the raw gRPC error before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
 * @description create resources of type resources.IKeywordPlanAdGroup
 * @returns services.MutateKeywordPlanAdGroupsResponse
 */
create: async (
keywordPlanAdGroups: (resources.IKeywordPlanAdGroup | resources.KeywordPlanAdGroup)[] ,
options?: MutateOptions
): Promise<services.MutateKeywordPlanAdGroupsResponse > => {
// Wrap each resource in a "create" operation.
const ops = this.buildOperations<
services.KeywordPlanAdGroupOperation,
resources.IKeywordPlanAdGroup
>(
"create",
keywordPlanAdGroups
);
// Assemble the mutate request from the operations plus caller options.
const request = this.buildRequest<
services.KeywordPlanAdGroupOperation,
services.IMutateKeywordPlanAdGroupsRequest,
MutateOptions
>(ops, options);
// Context shared by the onMutationStart/onMutationEnd/onMutationError hooks.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "KeywordPlanAdGroupService.mutateKeywordPlanAdGroups",
mutation: request,
isServiceCall: true,
};
// Let the onMutationStart hook cancel the call or edit request options.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
// Hook cancelled: return its substitute result without calling the API.
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateKeywordPlanAdGroups(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Let the onMutationEnd hook observe (and optionally replace) the response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the error, notify the onMutationError hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description update resources of type resources.IKeywordPlanAdGroup
 * @returns services.MutateKeywordPlanAdGroupsResponse
 */
update: async (
keywordPlanAdGroups: (resources.IKeywordPlanAdGroup | resources.KeywordPlanAdGroup)[] ,
options?: MutateOptions
): Promise<services.MutateKeywordPlanAdGroupsResponse > => {
// Wrap each resource in an "update" operation. The static resource class is
// also passed — presumably used by buildOperations to derive the update mask;
// confirm against buildOperations.
const ops = this.buildOperations<
services.KeywordPlanAdGroupOperation,
resources.IKeywordPlanAdGroup
>(
"update",
keywordPlanAdGroups
// @ts-expect-error Static class type here is fine
, resources.KeywordPlanAdGroup
);
const request = this.buildRequest<
services.KeywordPlanAdGroupOperation,
services.IMutateKeywordPlanAdGroupsRequest,
MutateOptions
>(ops, options);
// Context shared by the onMutationStart/onMutationEnd/onMutationError hooks.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "KeywordPlanAdGroupService.mutateKeywordPlanAdGroups",
mutation: request,
isServiceCall: true,
};
// Let the onMutationStart hook cancel the call or edit request options.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
// Hook cancelled: return its substitute result without calling the API.
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateKeywordPlanAdGroups(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Let the onMutationEnd hook observe (and optionally replace) the response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the error, notify the onMutationError hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description remove resources of type string
 * @returns services.MutateKeywordPlanAdGroupsResponse
 */
remove: async (
keywordPlanAdGroups: string[] ,
options?: MutateOptions
): Promise<services.MutateKeywordPlanAdGroupsResponse > => {
// Wrap each resource name in a "remove" operation.
const ops = this.buildOperations<
services.KeywordPlanAdGroupOperation,
string
>(
"remove",
keywordPlanAdGroups
);
const request = this.buildRequest<
services.KeywordPlanAdGroupOperation,
services.IMutateKeywordPlanAdGroupsRequest,
MutateOptions
>(ops, options);
// Context shared by the onMutationStart/onMutationEnd/onMutationError hooks.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "KeywordPlanAdGroupService.mutateKeywordPlanAdGroups",
mutation: request,
isServiceCall: true,
};
// Let the onMutationStart hook cancel the call or edit request options.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
// Hook cancelled: return its substitute result without calling the API.
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateKeywordPlanAdGroups(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Let the onMutationEnd hook observe (and optionally replace) the response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the error, notify the onMutationError hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
 * @description Accessor exposing get/create/update/remove operations for
 * resources.KeywordPlanCampaignKeyword via KeywordPlanCampaignKeywordService.
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/KeywordPlanCampaignKeywordService
 */
public get keywordPlanCampaignKeywords() {
// Load the generated gRPC client for this service.
const service = this.loadService<services.KeywordPlanCampaignKeywordService>("KeywordPlanCampaignKeywordServiceClient")
// Request-level flags callers may forward onto the mutate request.
type MutateOptions = Partial<Pick<services.IMutateKeywordPlanCampaignKeywordsRequest, "partial_failure"|"validate_only">>
return {
/**
 * @description Retrieve a resources.KeywordPlanCampaignKeyword in full detail
 * @warning Don't use get in production!
 * @returns resources.KeywordPlanCampaignKeyword
 */
get: async (resourceName: string): Promise<resources.KeywordPlanCampaignKeyword> => {
const request = new services.GetKeywordPlanCampaignKeywordRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getKeywordPlanCampaignKeyword(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalize the raw gRPC error before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
 * @description create resources of type resources.IKeywordPlanCampaignKeyword
 * @returns services.MutateKeywordPlanCampaignKeywordsResponse
 */
create: async (
keywordPlanCampaignKeywords: (resources.IKeywordPlanCampaignKeyword | resources.KeywordPlanCampaignKeyword)[] ,
options?: MutateOptions
): Promise<services.MutateKeywordPlanCampaignKeywordsResponse > => {
// Wrap each resource in a "create" operation.
const ops = this.buildOperations<
services.KeywordPlanCampaignKeywordOperation,
resources.IKeywordPlanCampaignKeyword
>(
"create",
keywordPlanCampaignKeywords
);
// Assemble the mutate request from the operations plus caller options.
const request = this.buildRequest<
services.KeywordPlanCampaignKeywordOperation,
services.IMutateKeywordPlanCampaignKeywordsRequest,
MutateOptions
>(ops, options);
// Context shared by the onMutationStart/onMutationEnd/onMutationError hooks.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "KeywordPlanCampaignKeywordService.mutateKeywordPlanCampaignKeywords",
mutation: request,
isServiceCall: true,
};
// Let the onMutationStart hook cancel the call or edit request options.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
// Hook cancelled: return its substitute result without calling the API.
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateKeywordPlanCampaignKeywords(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Let the onMutationEnd hook observe (and optionally replace) the response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the error, notify the onMutationError hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description update resources of type resources.IKeywordPlanCampaignKeyword
 * @returns services.MutateKeywordPlanCampaignKeywordsResponse
 */
update: async (
keywordPlanCampaignKeywords: (resources.IKeywordPlanCampaignKeyword | resources.KeywordPlanCampaignKeyword)[] ,
options?: MutateOptions
): Promise<services.MutateKeywordPlanCampaignKeywordsResponse > => {
// Wrap each resource in an "update" operation. The static resource class is
// also passed — presumably used by buildOperations to derive the update mask;
// confirm against buildOperations.
const ops = this.buildOperations<
services.KeywordPlanCampaignKeywordOperation,
resources.IKeywordPlanCampaignKeyword
>(
"update",
keywordPlanCampaignKeywords
// @ts-expect-error Static class type here is fine
, resources.KeywordPlanCampaignKeyword
);
const request = this.buildRequest<
services.KeywordPlanCampaignKeywordOperation,
services.IMutateKeywordPlanCampaignKeywordsRequest,
MutateOptions
>(ops, options);
// Context shared by the onMutationStart/onMutationEnd/onMutationError hooks.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "KeywordPlanCampaignKeywordService.mutateKeywordPlanCampaignKeywords",
mutation: request,
isServiceCall: true,
};
// Let the onMutationStart hook cancel the call or edit request options.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
// Hook cancelled: return its substitute result without calling the API.
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateKeywordPlanCampaignKeywords(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Let the onMutationEnd hook observe (and optionally replace) the response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the error, notify the onMutationError hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description remove resources of type string
 * @returns services.MutateKeywordPlanCampaignKeywordsResponse
 */
remove: async (
keywordPlanCampaignKeywords: string[] ,
options?: MutateOptions
): Promise<services.MutateKeywordPlanCampaignKeywordsResponse > => {
// Wrap each resource name in a "remove" operation.
const ops = this.buildOperations<
services.KeywordPlanCampaignKeywordOperation,
string
>(
"remove",
keywordPlanCampaignKeywords
);
const request = this.buildRequest<
services.KeywordPlanCampaignKeywordOperation,
services.IMutateKeywordPlanCampaignKeywordsRequest,
MutateOptions
>(ops, options);
// Context shared by the onMutationStart/onMutationEnd/onMutationError hooks.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "KeywordPlanCampaignKeywordService.mutateKeywordPlanCampaignKeywords",
mutation: request,
isServiceCall: true,
};
// Let the onMutationStart hook cancel the call or edit request options.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
// Hook cancelled: return its substitute result without calling the API.
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateKeywordPlanCampaignKeywords(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Let the onMutationEnd hook observe (and optionally replace) the response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the error, notify the onMutationError hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
 * @description Accessor exposing get/create/update/remove operations for
 * resources.KeywordPlanCampaign via KeywordPlanCampaignService.
 * @link https://developers.google.com/google-ads/api/reference/rpc/v9/KeywordPlanCampaignService
 */
public get keywordPlanCampaigns() {
// Load the generated gRPC client for this service.
const service = this.loadService<services.KeywordPlanCampaignService>("KeywordPlanCampaignServiceClient")
// Request-level flags callers may forward onto the mutate request.
type MutateOptions = Partial<Pick<services.IMutateKeywordPlanCampaignsRequest, "partial_failure"|"validate_only">>
return {
/**
 * @description Retrieve a resources.KeywordPlanCampaign in full detail
 * @warning Don't use get in production!
 * @returns resources.KeywordPlanCampaign
 */
get: async (resourceName: string): Promise<resources.KeywordPlanCampaign> => {
const request = new services.GetKeywordPlanCampaignRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getKeywordPlanCampaign(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalize the raw gRPC error before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
 * @description create resources of type resources.IKeywordPlanCampaign
 * @returns services.MutateKeywordPlanCampaignsResponse
 */
create: async (
keywordPlanCampaigns: (resources.IKeywordPlanCampaign | resources.KeywordPlanCampaign)[] ,
options?: MutateOptions
): Promise<services.MutateKeywordPlanCampaignsResponse > => {
// Wrap each resource in a "create" operation.
const ops = this.buildOperations<
services.KeywordPlanCampaignOperation,
resources.IKeywordPlanCampaign
>(
"create",
keywordPlanCampaigns
);
// Assemble the mutate request from the operations plus caller options.
const request = this.buildRequest<
services.KeywordPlanCampaignOperation,
services.IMutateKeywordPlanCampaignsRequest,
MutateOptions
>(ops, options);
// Context shared by the onMutationStart/onMutationEnd/onMutationError hooks.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "KeywordPlanCampaignService.mutateKeywordPlanCampaigns",
mutation: request,
isServiceCall: true,
};
// Let the onMutationStart hook cancel the call or edit request options.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
// Hook cancelled: return its substitute result without calling the API.
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateKeywordPlanCampaigns(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Let the onMutationEnd hook observe (and optionally replace) the response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the error, notify the onMutationError hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description update resources of type resources.IKeywordPlanCampaign
 * @returns services.MutateKeywordPlanCampaignsResponse
 */
update: async (
keywordPlanCampaigns: (resources.IKeywordPlanCampaign | resources.KeywordPlanCampaign)[] ,
options?: MutateOptions
): Promise<services.MutateKeywordPlanCampaignsResponse > => {
// Wrap each resource in an "update" operation. The static resource class is
// also passed — presumably used by buildOperations to derive the update mask;
// confirm against buildOperations.
const ops = this.buildOperations<
services.KeywordPlanCampaignOperation,
resources.IKeywordPlanCampaign
>(
"update",
keywordPlanCampaigns
// @ts-expect-error Static class type here is fine
, resources.KeywordPlanCampaign
);
const request = this.buildRequest<
services.KeywordPlanCampaignOperation,
services.IMutateKeywordPlanCampaignsRequest,
MutateOptions
>(ops, options);
// Context shared by the onMutationStart/onMutationEnd/onMutationError hooks.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "KeywordPlanCampaignService.mutateKeywordPlanCampaigns",
mutation: request,
isServiceCall: true,
};
// Let the onMutationStart hook cancel the call or edit request options.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
// Hook cancelled: return its substitute result without calling the API.
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateKeywordPlanCampaigns(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Let the onMutationEnd hook observe (and optionally replace) the response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the error, notify the onMutationError hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
 * @description remove resources of type string
 * @returns services.MutateKeywordPlanCampaignsResponse
 */
remove: async (
keywordPlanCampaigns: string[] ,
options?: MutateOptions
): Promise<services.MutateKeywordPlanCampaignsResponse > => {
// Wrap each resource name in a "remove" operation.
const ops = this.buildOperations<
services.KeywordPlanCampaignOperation,
string
>(
"remove",
keywordPlanCampaigns
);
const request = this.buildRequest<
services.KeywordPlanCampaignOperation,
services.IMutateKeywordPlanCampaignsRequest,
MutateOptions
>(ops, options);
// Context shared by the onMutationStart/onMutationEnd/onMutationError hooks.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "KeywordPlanCampaignService.mutateKeywordPlanCampaigns",
mutation: request,
isServiceCall: true,
};
// Let the onMutationStart hook cancel the call or edit request options.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
// Hook cancelled: return its substitute result without calling the API.
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateKeywordPlanCampaigns(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Let the onMutationEnd hook observe (and optionally replace) the response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the error, notify the onMutationError hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/KeywordPlanService
*/
public get keywordPlans() {
const service = this.loadService<services.KeywordPlanService>("KeywordPlanServiceClient")
type MutateOptions = Partial<Pick<services.IMutateKeywordPlansRequest, "partial_failure"|"validate_only">>
return {
/**
* @description Retrieve a resources.KeywordPlan in full detail
* @warning Don't use get in production!
* @returns resources.KeywordPlan
*/
get: async (resourceName: string): Promise<resources.KeywordPlan> => {
const request = new services.GetKeywordPlanRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getKeywordPlan(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.IKeywordPlan
* @returns services.MutateKeywordPlansResponse
*/
create: async (
keywordPlans: (resources.IKeywordPlan | resources.KeywordPlan)[] ,
options?: MutateOptions
): Promise<services.MutateKeywordPlansResponse > => {
const ops = this.buildOperations<
services.KeywordPlanOperation,
resources.IKeywordPlan
>(
"create",
keywordPlans
);
const request = this.buildRequest<
services.KeywordPlanOperation,
services.IMutateKeywordPlansRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "KeywordPlanService.mutateKeywordPlans",
mutation: request,
isServiceCall: true,
};
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateKeywordPlans(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description update resources of type resources.IKeywordPlan
* @returns services.MutateKeywordPlansResponse
*/
update: async (
keywordPlans: (resources.IKeywordPlan | resources.KeywordPlan)[] ,
options?: MutateOptions
): Promise<services.MutateKeywordPlansResponse > => {
// Wrap each keyword plan in an "update" operation envelope.
const ops = this.buildOperations<
services.KeywordPlanOperation,
resources.IKeywordPlan
>(
"update",
keywordPlans
// @ts-expect-error Static class type here is fine
, resources.KeywordPlan
);
// Assemble the mutate request, folding in any caller-supplied MutateOptions.
const request = this.buildRequest<
services.KeywordPlanOperation,
services.IMutateKeywordPlansRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation hook below.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "KeywordPlanService.mutateKeywordPlans",
mutation: request,
isServiceCall: true,
};
// Pre-mutation hook: may cancel the call entirely or edit request fields in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
// Copies hook-edited option keys directly onto the outgoing request.
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateKeywordPlans(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-mutation hook: may substitute its own value for the final response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
// decodePartialFailureError presumably surfaces partial_failure details — behavior not visible here.
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the failure, let the error hook observe it, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateKeywordPlansResponse
*/
remove: async (
keywordPlans: string[] ,
options?: MutateOptions
): Promise<services.MutateKeywordPlansResponse > => {
// Wrap each resource name string in a "remove" operation envelope.
const ops = this.buildOperations<
services.KeywordPlanOperation,
string
>(
"remove",
keywordPlans
);
// Assemble the mutate request, folding in any caller-supplied MutateOptions.
const request = this.buildRequest<
services.KeywordPlanOperation,
services.IMutateKeywordPlansRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation hook below.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "KeywordPlanService.mutateKeywordPlans",
mutation: request,
isServiceCall: true,
};
// Pre-mutation hook: may cancel the call entirely or edit request fields in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
// Copies hook-edited option keys directly onto the outgoing request.
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateKeywordPlans(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-mutation hook: may substitute its own value for the final response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the failure, let the error hook observe it, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/KeywordPlanService#generateforecastcurve
*/
generateForecastCurve: async (request: services.GenerateForecastCurveRequest): Promise<services.GenerateForecastCurveResponse> => {
// Thin passthrough to the gRPC client; per-call auth headers come from this.callHeaders.
try {
// @ts-expect-error Response is an array type
const [response] = await service.generateForecastCurve(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Rethrow transport/gRPC failures as a structured GoogleAdsError.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/KeywordPlanService#generateforecasttimeseries
*/
generateForecastTimeSeries: async (request: services.GenerateForecastTimeSeriesRequest): Promise<services.GenerateForecastTimeSeriesResponse> => {
// Thin passthrough to the gRPC client; per-call auth headers come from this.callHeaders.
try {
// @ts-expect-error Response is an array type
const [response] = await service.generateForecastTimeSeries(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Rethrow transport/gRPC failures as a structured GoogleAdsError.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/KeywordPlanService#generateforecastmetrics
*/
generateForecastMetrics: async (request: services.GenerateForecastMetricsRequest): Promise<services.GenerateForecastMetricsResponse> => {
// Thin passthrough to the gRPC client; per-call auth headers come from this.callHeaders.
try {
// @ts-expect-error Response is an array type
const [response] = await service.generateForecastMetrics(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Rethrow transport/gRPC failures as a structured GoogleAdsError.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/KeywordPlanService#generatehistoricalmetrics
*/
generateHistoricalMetrics: async (request: services.GenerateHistoricalMetricsRequest): Promise<services.GenerateHistoricalMetricsResponse> => {
// Thin passthrough to the gRPC client; per-call auth headers come from this.callHeaders.
try {
// @ts-expect-error Response is an array type
const [response] = await service.generateHistoricalMetrics(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Rethrow transport/gRPC failures as a structured GoogleAdsError.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/LabelService
*/
// Accessor for LabelService: returns get/create/update/remove wrappers bound to
// this client's credentials, hooks, and call headers.
public get labels() {
// loadService resolves the gRPC client by name; caching behavior is not visible here.
const service = this.loadService<services.LabelService>("LabelServiceClient")
// Request options callers may tweak per mutate call.
type MutateOptions = Partial<Pick<services.IMutateLabelsRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
* @description Retrieve a resources.Label in full detail
* @warning Don't use get in production!
* @returns resources.Label
*/
get: async (resourceName: string): Promise<resources.Label> => {
const request = new services.GetLabelRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getLabel(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Rethrow transport/gRPC failures as a structured GoogleAdsError.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.ILabel
* @returns services.MutateLabelsResponse
*/
create: async (
labels: (resources.ILabel | resources.Label)[] ,
options?: MutateOptions
): Promise<services.MutateLabelsResponse > => {
// Wrap each label in a "create" operation, then build the mutate request.
const ops = this.buildOperations<
services.LabelOperation,
resources.ILabel
>(
"create",
labels
);
const request = this.buildRequest<
services.LabelOperation,
services.IMutateLabelsRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation hook below.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "LabelService.mutateLabels",
mutation: request,
isServiceCall: true,
};
// Pre-mutation hook: may cancel the call or edit request fields in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateLabels(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-mutation hook: may substitute its own value for the final response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the failure, notify the error hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description update resources of type resources.ILabel
* @returns services.MutateLabelsResponse
*/
update: async (
labels: (resources.ILabel | resources.Label)[] ,
options?: MutateOptions
): Promise<services.MutateLabelsResponse > => {
// Wrap each label in an "update" operation, then build the mutate request.
const ops = this.buildOperations<
services.LabelOperation,
resources.ILabel
>(
"update",
labels
// @ts-expect-error Static class type here is fine
, resources.Label
);
const request = this.buildRequest<
services.LabelOperation,
services.IMutateLabelsRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation hook below.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "LabelService.mutateLabels",
mutation: request,
isServiceCall: true,
};
// Pre-mutation hook: may cancel the call or edit request fields in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateLabels(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-mutation hook: may substitute its own value for the final response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the failure, notify the error hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateLabelsResponse
*/
remove: async (
labels: string[] ,
options?: MutateOptions
): Promise<services.MutateLabelsResponse > => {
// Wrap each resource name in a "remove" operation, then build the mutate request.
const ops = this.buildOperations<
services.LabelOperation,
string
>(
"remove",
labels
);
const request = this.buildRequest<
services.LabelOperation,
services.IMutateLabelsRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation hook below.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "LabelService.mutateLabels",
mutation: request,
isServiceCall: true,
};
// Pre-mutation hook: may cancel the call or edit request fields in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateLabels(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-mutation hook: may substitute its own value for the final response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the failure, notify the error hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/MediaFileService
*/
// Accessor for MediaFileService: returns get/create wrappers bound to this
// client's credentials, hooks, and call headers. (No update/remove — media files
// expose only creation in this generated surface.)
public get mediaFiles() {
// loadService resolves the gRPC client by name; caching behavior is not visible here.
const service = this.loadService<services.MediaFileService>("MediaFileServiceClient")
// Request options callers may tweak per mutate call.
type MutateOptions = Partial<Pick<services.IMutateMediaFilesRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
* @description Retrieve a resources.MediaFile in full detail
* @warning Don't use get in production!
* @returns resources.MediaFile
*/
get: async (resourceName: string): Promise<resources.MediaFile> => {
const request = new services.GetMediaFileRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getMediaFile(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Rethrow transport/gRPC failures as a structured GoogleAdsError.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.IMediaFile
* @returns services.MutateMediaFilesResponse
*/
create: async (
mediaFiles: (resources.IMediaFile | resources.MediaFile)[] ,
options?: MutateOptions
): Promise<services.MutateMediaFilesResponse > => {
// Wrap each media file in a "create" operation, then build the mutate request.
const ops = this.buildOperations<
services.MediaFileOperation,
resources.IMediaFile
>(
"create",
mediaFiles
);
const request = this.buildRequest<
services.MediaFileOperation,
services.IMutateMediaFilesRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation hook below.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "MediaFileService.mutateMediaFiles",
mutation: request,
isServiceCall: true,
};
// Pre-mutation hook: may cancel the call or edit request fields in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateMediaFiles(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-mutation hook: may substitute its own value for the final response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the failure, notify the error hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/RemarketingActionService
*/
// Accessor for RemarketingActionService: returns get/create/update wrappers
// bound to this client's credentials, hooks, and call headers. (No remove —
// note this service's MutateOptions also lacks response_content_type.)
public get remarketingActions() {
// loadService resolves the gRPC client by name; caching behavior is not visible here.
const service = this.loadService<services.RemarketingActionService>("RemarketingActionServiceClient")
// Request options callers may tweak per mutate call.
type MutateOptions = Partial<Pick<services.IMutateRemarketingActionsRequest, "partial_failure"|"validate_only">>
return {
/**
* @description Retrieve a resources.RemarketingAction in full detail
* @warning Don't use get in production!
* @returns resources.RemarketingAction
*/
get: async (resourceName: string): Promise<resources.RemarketingAction> => {
const request = new services.GetRemarketingActionRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getRemarketingAction(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Rethrow transport/gRPC failures as a structured GoogleAdsError.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.IRemarketingAction
* @returns services.MutateRemarketingActionsResponse
*/
create: async (
remarketingActions: (resources.IRemarketingAction | resources.RemarketingAction)[] ,
options?: MutateOptions
): Promise<services.MutateRemarketingActionsResponse > => {
// Wrap each remarketing action in a "create" operation, then build the mutate request.
const ops = this.buildOperations<
services.RemarketingActionOperation,
resources.IRemarketingAction
>(
"create",
remarketingActions
);
const request = this.buildRequest<
services.RemarketingActionOperation,
services.IMutateRemarketingActionsRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation hook below.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "RemarketingActionService.mutateRemarketingActions",
mutation: request,
isServiceCall: true,
};
// Pre-mutation hook: may cancel the call or edit request fields in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateRemarketingActions(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-mutation hook: may substitute its own value for the final response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the failure, notify the error hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description update resources of type resources.IRemarketingAction
* @returns services.MutateRemarketingActionsResponse
*/
update: async (
remarketingActions: (resources.IRemarketingAction | resources.RemarketingAction)[] ,
options?: MutateOptions
): Promise<services.MutateRemarketingActionsResponse > => {
// Wrap each remarketing action in an "update" operation, then build the mutate request.
const ops = this.buildOperations<
services.RemarketingActionOperation,
resources.IRemarketingAction
>(
"update",
remarketingActions
// @ts-expect-error Static class type here is fine
, resources.RemarketingAction
);
const request = this.buildRequest<
services.RemarketingActionOperation,
services.IMutateRemarketingActionsRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation hook below.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "RemarketingActionService.mutateRemarketingActions",
mutation: request,
isServiceCall: true,
};
// Pre-mutation hook: may cancel the call or edit request fields in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateRemarketingActions(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-mutation hook: may substitute its own value for the final response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the failure, notify the error hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/SharedCriterionService
*/
// Accessor for SharedCriterionService: returns get/create/remove wrappers bound
// to this client's credentials, hooks, and call headers. (No update — shared
// criteria are immutable in this generated surface.)
public get sharedCriteria() {
// loadService resolves the gRPC client by name; caching behavior is not visible here.
const service = this.loadService<services.SharedCriterionService>("SharedCriterionServiceClient")
// Request options callers may tweak per mutate call.
type MutateOptions = Partial<Pick<services.IMutateSharedCriteriaRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
* @description Retrieve a resources.SharedCriterion in full detail
* @warning Don't use get in production!
* @returns resources.SharedCriterion
*/
get: async (resourceName: string): Promise<resources.SharedCriterion> => {
const request = new services.GetSharedCriterionRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getSharedCriterion(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Rethrow transport/gRPC failures as a structured GoogleAdsError.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.ISharedCriterion
* @returns services.MutateSharedCriteriaResponse
*/
create: async (
sharedCriteria: (resources.ISharedCriterion | resources.SharedCriterion)[] ,
options?: MutateOptions
): Promise<services.MutateSharedCriteriaResponse > => {
// Wrap each shared criterion in a "create" operation, then build the mutate request.
const ops = this.buildOperations<
services.SharedCriterionOperation,
resources.ISharedCriterion
>(
"create",
sharedCriteria
);
const request = this.buildRequest<
services.SharedCriterionOperation,
services.IMutateSharedCriteriaRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation hook below.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "SharedCriterionService.mutateSharedCriteria",
mutation: request,
isServiceCall: true,
};
// Pre-mutation hook: may cancel the call or edit request fields in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateSharedCriteria(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-mutation hook: may substitute its own value for the final response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the failure, notify the error hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateSharedCriteriaResponse
*/
remove: async (
sharedCriteria: string[] ,
options?: MutateOptions
): Promise<services.MutateSharedCriteriaResponse > => {
// Wrap each resource name in a "remove" operation, then build the mutate request.
const ops = this.buildOperations<
services.SharedCriterionOperation,
string
>(
"remove",
sharedCriteria
);
const request = this.buildRequest<
services.SharedCriterionOperation,
services.IMutateSharedCriteriaRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation hook below.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "SharedCriterionService.mutateSharedCriteria",
mutation: request,
isServiceCall: true,
};
// Pre-mutation hook: may cancel the call or edit request fields in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateSharedCriteria(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-mutation hook: may substitute its own value for the final response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the failure, notify the error hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/SharedSetService
*/
// Accessor for SharedSetService: returns get/create/update/remove wrappers
// bound to this client's credentials, hooks, and call headers.
public get sharedSets() {
// loadService resolves the gRPC client by name; caching behavior is not visible here.
const service = this.loadService<services.SharedSetService>("SharedSetServiceClient")
// Request options callers may tweak per mutate call.
type MutateOptions = Partial<Pick<services.IMutateSharedSetsRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
* @description Retrieve a resources.SharedSet in full detail
* @warning Don't use get in production!
* @returns resources.SharedSet
*/
get: async (resourceName: string): Promise<resources.SharedSet> => {
const request = new services.GetSharedSetRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getSharedSet(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Rethrow transport/gRPC failures as a structured GoogleAdsError.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.ISharedSet
* @returns services.MutateSharedSetsResponse
*/
create: async (
sharedSets: (resources.ISharedSet | resources.SharedSet)[] ,
options?: MutateOptions
): Promise<services.MutateSharedSetsResponse > => {
// Wrap each shared set in a "create" operation, then build the mutate request.
const ops = this.buildOperations<
services.SharedSetOperation,
resources.ISharedSet
>(
"create",
sharedSets
);
const request = this.buildRequest<
services.SharedSetOperation,
services.IMutateSharedSetsRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation hook below.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "SharedSetService.mutateSharedSets",
mutation: request,
isServiceCall: true,
};
// Pre-mutation hook: may cancel the call or edit request fields in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateSharedSets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-mutation hook: may substitute its own value for the final response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the failure, notify the error hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description update resources of type resources.ISharedSet
* @returns services.MutateSharedSetsResponse
*/
update: async (
sharedSets: (resources.ISharedSet | resources.SharedSet)[] ,
options?: MutateOptions
): Promise<services.MutateSharedSetsResponse > => {
// Wrap each shared set in an "update" operation, then build the mutate request.
const ops = this.buildOperations<
services.SharedSetOperation,
resources.ISharedSet
>(
"update",
sharedSets
// @ts-expect-error Static class type here is fine
, resources.SharedSet
);
const request = this.buildRequest<
services.SharedSetOperation,
services.IMutateSharedSetsRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation hook below.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "SharedSetService.mutateSharedSets",
mutation: request,
isServiceCall: true,
};
// Pre-mutation hook: may cancel the call or edit request fields in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateSharedSets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-mutation hook: may substitute its own value for the final response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the failure, notify the error hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateSharedSetsResponse
*/
remove: async (
sharedSets: string[] ,
options?: MutateOptions
): Promise<services.MutateSharedSetsResponse > => {
// Wrap each resource name in a "remove" operation, then build the mutate request.
const ops = this.buildOperations<
services.SharedSetOperation,
string
>(
"remove",
sharedSets
);
const request = this.buildRequest<
services.SharedSetOperation,
services.IMutateSharedSetsRequest,
MutateOptions
>(ops, options);
// Shared context handed to every mutation hook below.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "SharedSetService.mutateSharedSets",
mutation: request,
isServiceCall: true,
};
// Pre-mutation hook: may cancel the call or edit request fields in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateSharedSets(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Post-mutation hook: may substitute its own value for the final response.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
// Normalize the failure, notify the error hook, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/SmartCampaignSettingService
*/
public get smartCampaignSettings() {
const service = this.loadService<services.SmartCampaignSettingService>("SmartCampaignSettingServiceClient")
type MutateOptions = Partial<Pick<services.IMutateSmartCampaignSettingsRequest, "partial_failure"|"validate_only"|"response_content_type">>
return {
/**
* @description Retrieve a resources.SmartCampaignSetting in full detail
* @warning Don't use get in production!
* @returns resources.SmartCampaignSetting
*/
get: async (resourceName: string): Promise<resources.SmartCampaignSetting> => {
const request = new services.GetSmartCampaignSettingRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getSmartCampaignSetting(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description update resources of type resources.ISmartCampaignSetting
* @returns services.MutateSmartCampaignSettingsResponse
*/
update: async (
smartCampaignSettings: (resources.ISmartCampaignSetting | resources.SmartCampaignSetting)[] ,
options?: MutateOptions
): Promise<services.MutateSmartCampaignSettingsResponse > => {
const ops = this.buildOperations<
services.SmartCampaignSettingOperation,
resources.ISmartCampaignSetting
>(
"update",
smartCampaignSettings
// @ts-expect-error Static class type here is fine
, resources.SmartCampaignSetting
);
const request = this.buildRequest<
services.SmartCampaignSettingOperation,
services.IMutateSmartCampaignSettingsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "SmartCampaignSettingService.mutateSmartCampaignSettings",
mutation: request,
isServiceCall: true,
};
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateSmartCampaignSettings(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/UserListService
*/
public get userLists() {
const service = this.loadService<services.UserListService>("UserListServiceClient")
type MutateOptions = Partial<Pick<services.IMutateUserListsRequest, "partial_failure"|"validate_only">>
return {
/**
* @description Retrieve a resources.UserList in full detail
* @warning Don't use get in production!
* @returns resources.UserList
*/
get: async (resourceName: string): Promise<resources.UserList> => {
const request = new services.GetUserListRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getUserList(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.IUserList
* @returns services.MutateUserListsResponse
*/
create: async (
userLists: (resources.IUserList | resources.UserList)[] ,
options?: MutateOptions
): Promise<services.MutateUserListsResponse > => {
const ops = this.buildOperations<
services.UserListOperation,
resources.IUserList
>(
"create",
userLists
);
const request = this.buildRequest<
services.UserListOperation,
services.IMutateUserListsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "UserListService.mutateUserLists",
mutation: request,
isServiceCall: true,
};
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateUserLists(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description update resources of type resources.IUserList
* @returns services.MutateUserListsResponse
*/
update: async (
userLists: (resources.IUserList | resources.UserList)[] ,
options?: MutateOptions
): Promise<services.MutateUserListsResponse > => {
const ops = this.buildOperations<
services.UserListOperation,
resources.IUserList
>(
"update",
userLists
// @ts-expect-error Static class type here is fine
, resources.UserList
);
const request = this.buildRequest<
services.UserListOperation,
services.IMutateUserListsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "UserListService.mutateUserLists",
mutation: request,
isServiceCall: true,
};
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateUserLists(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateUserListsResponse
*/
remove: async (
userLists: string[] ,
options?: MutateOptions
): Promise<services.MutateUserListsResponse > => {
const ops = this.buildOperations<
services.UserListOperation,
string
>(
"remove",
userLists
);
const request = this.buildRequest<
services.UserListOperation,
services.IMutateUserListsRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "UserListService.mutateUserLists",
mutation: request,
isServiceCall: true,
};
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateUserLists(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response: this.decodePartialFailureError(response),
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return this.decodePartialFailureError(response);
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/BiddingStrategySimulationService
*/
public get biddingStrategySimulations() {
const service = this.loadService<services.BiddingStrategySimulationService>("BiddingStrategySimulationServiceClient")
return {
/**
* @description Retrieve a resources.BiddingStrategySimulation in full detail
* @warning Don't use get in production!
* @returns resources.BiddingStrategySimulation
*/
get: async (resourceName: string): Promise<resources.BiddingStrategySimulation> => {
const request = new services.GetBiddingStrategySimulationRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getBiddingStrategySimulation(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/BillingSetupService
*/
public get billingSetups() {
const service = this.loadService<services.BillingSetupService>("BillingSetupServiceClient")
type MutateOptions = never
return {
/**
* @description Retrieve a resources.BillingSetup in full detail
* @warning Don't use get in production!
* @returns resources.BillingSetup
*/
get: async (resourceName: string): Promise<resources.BillingSetup> => {
const request = new services.GetBillingSetupRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getBillingSetup(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.IBillingSetup
* @returns services.MutateBillingSetupResponse
*/
create: async (
billingSetups: (resources.IBillingSetup | resources.BillingSetup)[] ,
options?: MutateOptions
): Promise<services.MutateBillingSetupResponse > => {
const ops = this.buildOperations<
services.BillingSetupOperation,
resources.IBillingSetup
>(
"create",
billingSetups
);
const request = this.buildRequest<
services.BillingSetupOperation,
services.IMutateBillingSetupRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "BillingSetupService.mutateBillingSetup",
mutation: request,
isServiceCall: true,
};
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateBillingSetup(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response,
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return response;
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateBillingSetupResponse
*/
remove: async (
billingSetups: string[] ,
options?: MutateOptions
): Promise<services.MutateBillingSetupResponse > => {
const ops = this.buildOperations<
services.BillingSetupOperation,
string
>(
"remove",
billingSetups
);
const request = this.buildRequest<
services.BillingSetupOperation,
services.IMutateBillingSetupRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "BillingSetupService.mutateBillingSetup",
mutation: request,
isServiceCall: true,
};
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateBillingSetup(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response,
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return response;
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CampaignAudienceViewService
*/
public get campaignAudienceViews() {
const service = this.loadService<services.CampaignAudienceViewService>("CampaignAudienceViewServiceClient")
return {
/**
* @description Retrieve a resources.CampaignAudienceView in full detail
* @warning Don't use get in production!
* @returns resources.CampaignAudienceView
*/
get: async (resourceName: string): Promise<resources.CampaignAudienceView> => {
const request = new services.GetCampaignAudienceViewRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getCampaignAudienceView(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CampaignCriterionSimulationService
*/
public get campaignCriterionSimulations() {
const service = this.loadService<services.CampaignCriterionSimulationService>("CampaignCriterionSimulationServiceClient")
return {
/**
* @description Retrieve a resources.CampaignCriterionSimulation in full detail
* @warning Don't use get in production!
* @returns resources.CampaignCriterionSimulation
*/
get: async (resourceName: string): Promise<resources.CampaignCriterionSimulation> => {
const request = new services.GetCampaignCriterionSimulationRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getCampaignCriterionSimulation(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CampaignSimulationService
*/
public get campaignSimulations() {
const service = this.loadService<services.CampaignSimulationService>("CampaignSimulationServiceClient")
return {
/**
* @description Retrieve a resources.CampaignSimulation in full detail
* @warning Don't use get in production!
* @returns resources.CampaignSimulation
*/
get: async (resourceName: string): Promise<resources.CampaignSimulation> => {
const request = new services.GetCampaignSimulationRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getCampaignSimulation(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CarrierConstantService
*/
public get carrierConstants() {
const service = this.loadService<services.CarrierConstantService>("CarrierConstantServiceClient")
return {
/**
* @description Retrieve a resources.CarrierConstant in full detail
* @warning Don't use get in production!
* @returns resources.CarrierConstant
*/
get: async (resourceName: string): Promise<resources.CarrierConstant> => {
const request = new services.GetCarrierConstantRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getCarrierConstant(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/ChangeStatusService
*/
public get changeStatuses() {
const service = this.loadService<services.ChangeStatusService>("ChangeStatusServiceClient")
return {
/**
* @description Retrieve a resources.ChangeStatus in full detail
* @warning Don't use get in production!
* @returns resources.ChangeStatus
*/
get: async (resourceName: string): Promise<resources.ChangeStatus> => {
const request = new services.GetChangeStatusRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getChangeStatus(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/ClickViewService
*/
public get clickViews() {
const service = this.loadService<services.ClickViewService>("ClickViewServiceClient")
return {
/**
* @description Retrieve a resources.ClickView in full detail
* @warning Don't use get in production!
* @returns resources.ClickView
*/
get: async (resourceName: string): Promise<resources.ClickView> => {
const request = new services.GetClickViewRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getClickView(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CombinedAudienceService
*/
public get combinedAudiences() {
const service = this.loadService<services.CombinedAudienceService>("CombinedAudienceServiceClient")
return {
/**
* @description Retrieve a resources.CombinedAudience in full detail
* @warning Don't use get in production!
* @returns resources.CombinedAudience
*/
get: async (resourceName: string): Promise<resources.CombinedAudience> => {
const request = new services.GetCombinedAudienceRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getCombinedAudience(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/ConversionAdjustmentUploadService
*/
public get conversionAdjustmentUploads() {
const service = this.loadService<services.ConversionAdjustmentUploadService>("ConversionAdjustmentUploadServiceClient")
return {
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/ConversionAdjustmentUploadService#uploadconversionadjustments
*/
uploadConversionAdjustments: async (request: services.UploadConversionAdjustmentsRequest): Promise<services.UploadConversionAdjustmentsResponse> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.uploadConversionAdjustments(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/ConversionUploadService
*/
public get conversionUploads() {
const service = this.loadService<services.ConversionUploadService>("ConversionUploadServiceClient")
return {
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/ConversionUploadService#uploadclickconversions
*/
uploadClickConversions: async (request: services.UploadClickConversionsRequest): Promise<services.UploadClickConversionsResponse> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.uploadClickConversions(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/ConversionUploadService#uploadcallconversions
*/
uploadCallConversions: async (request: services.UploadCallConversionsRequest): Promise<services.UploadCallConversionsResponse> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.uploadCallConversions(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CurrencyConstantService
*/
public get currencyConstants() {
const service = this.loadService<services.CurrencyConstantService>("CurrencyConstantServiceClient")
return {
/**
* @description Retrieve a resources.CurrencyConstant in full detail
* @warning Don't use get in production!
* @returns resources.CurrencyConstant
*/
get: async (resourceName: string): Promise<resources.CurrencyConstant> => {
const request = new services.GetCurrencyConstantRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getCurrencyConstant(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CustomAudienceService
*/
public get customAudiences() {
const service = this.loadService<services.CustomAudienceService>("CustomAudienceServiceClient")
type MutateOptions = Partial<Pick<services.IMutateCustomAudiencesRequest, "validate_only">>
return {
/**
* @description Retrieve a resources.CustomAudience in full detail
* @warning Don't use get in production!
* @returns resources.CustomAudience
*/
get: async (resourceName: string): Promise<resources.CustomAudience> => {
const request = new services.GetCustomAudienceRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getCustomAudience(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.ICustomAudience
* @returns services.MutateCustomAudiencesResponse
*/
create: async (
customAudiences: (resources.ICustomAudience | resources.CustomAudience)[] ,
options?: MutateOptions
): Promise<services.MutateCustomAudiencesResponse > => {
const ops = this.buildOperations<
services.CustomAudienceOperation,
resources.ICustomAudience
>(
"create",
customAudiences
);
const request = this.buildRequest<
services.CustomAudienceOperation,
services.IMutateCustomAudiencesRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CustomAudienceService.mutateCustomAudiences",
mutation: request,
isServiceCall: true,
};
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCustomAudiences(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response,
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return response;
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description update resources of type resources.ICustomAudience
* @returns services.MutateCustomAudiencesResponse
*/
update: async (
customAudiences: (resources.ICustomAudience | resources.CustomAudience)[] ,
options?: MutateOptions
): Promise<services.MutateCustomAudiencesResponse > => {
const ops = this.buildOperations<
services.CustomAudienceOperation,
resources.ICustomAudience
>(
"update",
customAudiences
// @ts-expect-error Static class type here is fine
, resources.CustomAudience
);
const request = this.buildRequest<
services.CustomAudienceOperation,
services.IMutateCustomAudiencesRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CustomAudienceService.mutateCustomAudiences",
mutation: request,
isServiceCall: true,
};
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCustomAudiences(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response,
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return response;
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateCustomAudiencesResponse
*/
remove: async (
customAudiences: string[] ,
options?: MutateOptions
): Promise<services.MutateCustomAudiencesResponse > => {
const ops = this.buildOperations<
services.CustomAudienceOperation,
string
>(
"remove",
customAudiences
);
const request = this.buildRequest<
services.CustomAudienceOperation,
services.IMutateCustomAudiencesRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CustomAudienceService.mutateCustomAudiences",
mutation: request,
isServiceCall: true,
};
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCustomAudiences(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response,
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return response;
} catch (err) {
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CustomInterestService
*/
public get customInterests() {
// Service client shared by every method returned from this getter.
const service = this.loadService<services.CustomInterestService>("CustomInterestServiceClient")
// Callers may only override validate_only on this mutate request.
type MutateOptions = Partial<Pick<services.IMutateCustomInterestsRequest, "validate_only">>
return {
/**
* @description Retrieve a resources.CustomInterest in full detail
* @warning Don't use get in production!
* @returns resources.CustomInterest
*/
get: async (resourceName: string): Promise<resources.CustomInterest> => {
const request = new services.GetCustomInterestRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getCustomInterest(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalize to a GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.ICustomInterest
* @returns services.MutateCustomInterestsResponse
*/
create: async (
customInterests: (resources.ICustomInterest | resources.CustomInterest)[] ,
options?: MutateOptions
): Promise<services.MutateCustomInterestsResponse > => {
const ops = this.buildOperations<
services.CustomInterestOperation,
resources.ICustomInterest
>(
"create",
customInterests
);
const request = this.buildRequest<
services.CustomInterestOperation,
services.IMutateCustomInterestsRequest,
MutateOptions
>(ops, options);
// Context passed to every mutation hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CustomInterestService.mutateCustomInterests",
mutation: request,
isServiceCall: true,
};
// onMutationStart may cancel the call entirely or edit request options in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCustomInterests(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd may substitute its own result via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response,
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return response;
} catch (err) {
// Normalize the failure, report it to onMutationError, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description update resources of type resources.ICustomInterest
* @returns services.MutateCustomInterestsResponse
*/
update: async (
customInterests: (resources.ICustomInterest | resources.CustomInterest)[] ,
options?: MutateOptions
): Promise<services.MutateCustomInterestsResponse > => {
const ops = this.buildOperations<
services.CustomInterestOperation,
resources.ICustomInterest
>(
"update",
customInterests
// @ts-expect-error Static class type here is fine
, resources.CustomInterest
);
const request = this.buildRequest<
services.CustomInterestOperation,
services.IMutateCustomInterestsRequest,
MutateOptions
>(ops, options);
// Context passed to every mutation hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CustomInterestService.mutateCustomInterests",
mutation: request,
isServiceCall: true,
};
// onMutationStart may cancel the call entirely or edit request options in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCustomInterests(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd may substitute its own result via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response,
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return response;
} catch (err) {
// Normalize the failure, report it to onMutationError, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CustomerClientLinkService
*/
public get customerClientLinks() {
// Service client shared by every method returned from this getter.
const service = this.loadService<services.CustomerClientLinkService>("CustomerClientLinkServiceClient")
// Callers may only override validate_only on this mutate request.
type MutateOptions = Partial<Pick<services.IMutateCustomerClientLinkRequest, "validate_only">>
return {
/**
* @description Retrieve a resources.CustomerClientLink in full detail
* @warning Don't use get in production!
* @returns resources.CustomerClientLink
*/
get: async (resourceName: string): Promise<resources.CustomerClientLink> => {
const request = new services.GetCustomerClientLinkRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getCustomerClientLink(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalize to a GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.ICustomerClientLink
* @returns services.MutateCustomerClientLinkResponse
*/
create: async (
customerClientLinks: (resources.ICustomerClientLink | resources.CustomerClientLink)[] ,
options?: MutateOptions
): Promise<services.MutateCustomerClientLinkResponse > => {
const ops = this.buildOperations<
services.CustomerClientLinkOperation,
resources.ICustomerClientLink
>(
"create",
customerClientLinks
);
const request = this.buildRequest<
services.CustomerClientLinkOperation,
services.IMutateCustomerClientLinkRequest,
MutateOptions
>(ops, options);
// Context passed to every mutation hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CustomerClientLinkService.mutateCustomerClientLink",
mutation: request,
isServiceCall: true,
};
// onMutationStart may cancel the call entirely or edit request options in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCustomerClientLink(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd may substitute its own result via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response,
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return response;
} catch (err) {
// Normalize the failure, report it to onMutationError, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description update resources of type resources.ICustomerClientLink
* @returns services.MutateCustomerClientLinkResponse
*/
update: async (
customerClientLinks: (resources.ICustomerClientLink | resources.CustomerClientLink)[] ,
options?: MutateOptions
): Promise<services.MutateCustomerClientLinkResponse > => {
const ops = this.buildOperations<
services.CustomerClientLinkOperation,
resources.ICustomerClientLink
>(
"update",
customerClientLinks
// @ts-expect-error Static class type here is fine
, resources.CustomerClientLink
);
const request = this.buildRequest<
services.CustomerClientLinkOperation,
services.IMutateCustomerClientLinkRequest,
MutateOptions
>(ops, options);
// Context passed to every mutation hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CustomerClientLinkService.mutateCustomerClientLink",
mutation: request,
isServiceCall: true,
};
// onMutationStart may cancel the call entirely or edit request options in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCustomerClientLink(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd may substitute its own result via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response,
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return response;
} catch (err) {
// Normalize the failure, report it to onMutationError, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CustomerClientService
*/
public get customerClients() {
  const service = this.loadService<services.CustomerClientService>("CustomerClientServiceClient")
  return {
    /**
     * @description Fetch one resources.CustomerClient in full detail
     * @warning Don't use get in production!
     * @returns resources.CustomerClient
     */
    get: async (resourceName: string): Promise<resources.CustomerClient> => {
      // Build the lookup request from the supplied resource name.
      const getRequest = new services.GetCustomerClientRequest({ resource_name: resourceName });
      try {
        // @ts-expect-error Response is an array type
        const [customerClient] = await service.getCustomerClient(getRequest, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        return customerClient;
      } catch (err) {
        // Convert the raw failure into a structured Google Ads error.
        throw this.getGoogleAdsError(err);
      }
    }
  }
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CustomerManagerLinkService
*/
public get customerManagerLinks() {
// Service client shared by every method returned from this getter.
const service = this.loadService<services.CustomerManagerLinkService>("CustomerManagerLinkServiceClient")
// Callers may only override validate_only on this mutate request.
type MutateOptions = Partial<Pick<services.IMutateCustomerManagerLinkRequest, "validate_only">>
return {
/**
* @description Retrieve a resources.CustomerManagerLink in full detail
* @warning Don't use get in production!
* @returns resources.CustomerManagerLink
*/
get: async (resourceName: string): Promise<resources.CustomerManagerLink> => {
const request = new services.GetCustomerManagerLinkRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getCustomerManagerLink(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalize to a GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description update resources of type resources.ICustomerManagerLink
* @returns services.MutateCustomerManagerLinkResponse
*/
update: async (
customerManagerLinks: (resources.ICustomerManagerLink | resources.CustomerManagerLink)[] ,
options?: MutateOptions
): Promise<services.MutateCustomerManagerLinkResponse > => {
const ops = this.buildOperations<
services.CustomerManagerLinkOperation,
resources.ICustomerManagerLink
>(
"update",
customerManagerLinks
// @ts-expect-error Static class type here is fine
, resources.CustomerManagerLink
);
const request = this.buildRequest<
services.CustomerManagerLinkOperation,
services.IMutateCustomerManagerLinkRequest,
MutateOptions
>(ops, options);
// Context passed to every mutation hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CustomerManagerLinkService.mutateCustomerManagerLink",
mutation: request,
isServiceCall: true,
};
// onMutationStart may cancel the call entirely or edit request options in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCustomerManagerLink(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd may substitute its own result via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response,
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return response;
} catch (err) {
// Normalize the failure, report it to onMutationError, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CustomerManagerLinkService#movemanagerlink
*/
// Pass-through RPC: the caller-built request is forwarded unchanged.
moveManagerLink: async (request: services.MoveManagerLinkRequest): Promise<services.MoveManagerLinkResponse> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.moveManagerLink(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalize to a GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CustomerUserAccessInvitationService
*/
public get customerUserAccessInvitations() {
// Service client shared by every method returned from this getter.
const service = this.loadService<services.CustomerUserAccessInvitationService>("CustomerUserAccessInvitationServiceClient")
// No per-call options are supported for this mutate request.
type MutateOptions = never
return {
/**
* @description Retrieve a resources.CustomerUserAccessInvitation in full detail
* @warning Don't use get in production!
* @returns resources.CustomerUserAccessInvitation
*/
get: async (resourceName: string): Promise<resources.CustomerUserAccessInvitation> => {
const request = new services.GetCustomerUserAccessInvitationRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getCustomerUserAccessInvitation(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalize to a GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description create resources of type resources.ICustomerUserAccessInvitation
* @returns services.MutateCustomerUserAccessInvitationResponse
*/
create: async (
customerUserAccessInvitations: (resources.ICustomerUserAccessInvitation | resources.CustomerUserAccessInvitation)[] ,
options?: MutateOptions
): Promise<services.MutateCustomerUserAccessInvitationResponse > => {
const ops = this.buildOperations<
services.CustomerUserAccessInvitationOperation,
resources.ICustomerUserAccessInvitation
>(
"create",
customerUserAccessInvitations
);
const request = this.buildRequest<
services.CustomerUserAccessInvitationOperation,
services.IMutateCustomerUserAccessInvitationRequest,
MutateOptions
>(ops, options);
// Context passed to every mutation hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CustomerUserAccessInvitationService.mutateCustomerUserAccessInvitation",
mutation: request,
isServiceCall: true,
};
// onMutationStart may cancel the call entirely or edit request options in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCustomerUserAccessInvitation(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd may substitute its own result via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response,
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return response;
} catch (err) {
// Normalize the failure, report it to onMutationError, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateCustomerUserAccessInvitationResponse
*/
remove: async (
customerUserAccessInvitations: string[] ,
options?: MutateOptions
): Promise<services.MutateCustomerUserAccessInvitationResponse > => {
const ops = this.buildOperations<
services.CustomerUserAccessInvitationOperation,
string
>(
"remove",
customerUserAccessInvitations
);
const request = this.buildRequest<
services.CustomerUserAccessInvitationOperation,
services.IMutateCustomerUserAccessInvitationRequest,
MutateOptions
>(ops, options);
// Context passed to every mutation hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CustomerUserAccessInvitationService.mutateCustomerUserAccessInvitation",
mutation: request,
isServiceCall: true,
};
// onMutationStart may cancel the call entirely or edit request options in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCustomerUserAccessInvitation(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd may substitute its own result via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response,
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return response;
} catch (err) {
// Normalize the failure, report it to onMutationError, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/CustomerUserAccessService
*/
public get customerUserAccesses() {
// Service client shared by every method returned from this getter.
const service = this.loadService<services.CustomerUserAccessService>("CustomerUserAccessServiceClient")
// No per-call options are supported for this mutate request.
type MutateOptions = never
return {
/**
* @description Retrieve a resources.CustomerUserAccess in full detail
* @warning Don't use get in production!
* @returns resources.CustomerUserAccess
*/
get: async (resourceName: string): Promise<resources.CustomerUserAccess> => {
const request = new services.GetCustomerUserAccessRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getCustomerUserAccess(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalize to a GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description update resources of type resources.ICustomerUserAccess
* @returns services.MutateCustomerUserAccessResponse
*/
update: async (
customerUserAccesses: (resources.ICustomerUserAccess | resources.CustomerUserAccess)[] ,
options?: MutateOptions
): Promise<services.MutateCustomerUserAccessResponse > => {
const ops = this.buildOperations<
services.CustomerUserAccessOperation,
resources.ICustomerUserAccess
>(
"update",
customerUserAccesses
// @ts-expect-error Static class type here is fine
, resources.CustomerUserAccess
);
const request = this.buildRequest<
services.CustomerUserAccessOperation,
services.IMutateCustomerUserAccessRequest,
MutateOptions
>(ops, options);
// Context passed to every mutation hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CustomerUserAccessService.mutateCustomerUserAccess",
mutation: request,
isServiceCall: true,
};
// onMutationStart may cancel the call entirely or edit request options in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCustomerUserAccess(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd may substitute its own result via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response,
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return response;
} catch (err) {
// Normalize the failure, report it to onMutationError, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateCustomerUserAccessResponse
*/
remove: async (
customerUserAccesses: string[] ,
options?: MutateOptions
): Promise<services.MutateCustomerUserAccessResponse > => {
const ops = this.buildOperations<
services.CustomerUserAccessOperation,
string
>(
"remove",
customerUserAccesses
);
const request = this.buildRequest<
services.CustomerUserAccessOperation,
services.IMutateCustomerUserAccessRequest,
MutateOptions
>(ops, options);
// Context passed to every mutation hook for this call.
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "CustomerUserAccessService.mutateCustomerUserAccess",
mutation: request,
isServiceCall: true,
};
// onMutationStart may cancel the call entirely or edit request options in place.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateCustomerUserAccess(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// onMutationEnd may substitute its own result via resolve().
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response,
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return response;
} catch (err) {
// Normalize the failure, report it to onMutationError, then rethrow.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/DetailPlacementViewService
*/
public get detailPlacementViews() {
  const service = this.loadService<services.DetailPlacementViewService>("DetailPlacementViewServiceClient")
  return {
    /**
     * @description Fetch one resources.DetailPlacementView in full detail
     * @warning Don't use get in production!
     * @returns resources.DetailPlacementView
     */
    get: async (resourceName: string): Promise<resources.DetailPlacementView> => {
      // Build the lookup request from the supplied resource name.
      const getRequest = new services.GetDetailPlacementViewRequest({ resource_name: resourceName });
      try {
        // @ts-expect-error Response is an array type
        const [detailPlacementView] = await service.getDetailPlacementView(getRequest, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        return detailPlacementView;
      } catch (err) {
        // Convert the raw failure into a structured Google Ads error.
        throw this.getGoogleAdsError(err);
      }
    }
  }
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/DetailedDemographicService
*/
public get detailedDemographics() {
  const service = this.loadService<services.DetailedDemographicService>("DetailedDemographicServiceClient")
  return {
    /**
     * @description Fetch one resources.DetailedDemographic in full detail
     * @warning Don't use get in production!
     * @returns resources.DetailedDemographic
     */
    get: async (resourceName: string): Promise<resources.DetailedDemographic> => {
      // Build the lookup request from the supplied resource name.
      const getRequest = new services.GetDetailedDemographicRequest({ resource_name: resourceName });
      try {
        // @ts-expect-error Response is an array type
        const [detailedDemographic] = await service.getDetailedDemographic(getRequest, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        return detailedDemographic;
      } catch (err) {
        // Convert the raw failure into a structured Google Ads error.
        throw this.getGoogleAdsError(err);
      }
    }
  }
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/DisplayKeywordViewService
*/
public get displayKeywordViews() {
  const service = this.loadService<services.DisplayKeywordViewService>("DisplayKeywordViewServiceClient")
  return {
    /**
     * @description Fetch one resources.DisplayKeywordView in full detail
     * @warning Don't use get in production!
     * @returns resources.DisplayKeywordView
     */
    get: async (resourceName: string): Promise<resources.DisplayKeywordView> => {
      // Build the lookup request from the supplied resource name.
      const getRequest = new services.GetDisplayKeywordViewRequest({ resource_name: resourceName });
      try {
        // @ts-expect-error Response is an array type
        const [displayKeywordView] = await service.getDisplayKeywordView(getRequest, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        return displayKeywordView;
      } catch (err) {
        // Convert the raw failure into a structured Google Ads error.
        throw this.getGoogleAdsError(err);
      }
    }
  }
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/DistanceViewService
*/
public get distanceViews() {
  const service = this.loadService<services.DistanceViewService>("DistanceViewServiceClient")
  return {
    /**
     * @description Fetch one resources.DistanceView in full detail
     * @warning Don't use get in production!
     * @returns resources.DistanceView
     */
    get: async (resourceName: string): Promise<resources.DistanceView> => {
      // Build the lookup request from the supplied resource name.
      const getRequest = new services.GetDistanceViewRequest({ resource_name: resourceName });
      try {
        // @ts-expect-error Response is an array type
        const [distanceView] = await service.getDistanceView(getRequest, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        return distanceView;
      } catch (err) {
        // Convert the raw failure into a structured Google Ads error.
        throw this.getGoogleAdsError(err);
      }
    }
  }
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/DomainCategoryService
*/
public get domainCategories() {
  const service = this.loadService<services.DomainCategoryService>("DomainCategoryServiceClient")
  return {
    /**
     * @description Fetch one resources.DomainCategory in full detail
     * @warning Don't use get in production!
     * @returns resources.DomainCategory
     */
    get: async (resourceName: string): Promise<resources.DomainCategory> => {
      // Build the lookup request from the supplied resource name.
      const getRequest = new services.GetDomainCategoryRequest({ resource_name: resourceName });
      try {
        // @ts-expect-error Response is an array type
        const [domainCategory] = await service.getDomainCategory(getRequest, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        return domainCategory;
      } catch (err) {
        // Convert the raw failure into a structured Google Ads error.
        throw this.getGoogleAdsError(err);
      }
    }
  }
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/DynamicSearchAdsSearchTermViewService
*/
public get dynamicSearchAdsSearchTermViews() {
  const service = this.loadService<services.DynamicSearchAdsSearchTermViewService>("DynamicSearchAdsSearchTermViewServiceClient")
  return {
    /**
     * @description Fetch one resources.DynamicSearchAdsSearchTermView in full detail
     * @warning Don't use get in production!
     * @returns resources.DynamicSearchAdsSearchTermView
     */
    get: async (resourceName: string): Promise<resources.DynamicSearchAdsSearchTermView> => {
      // Build the lookup request from the supplied resource name.
      const getRequest = new services.GetDynamicSearchAdsSearchTermViewRequest({ resource_name: resourceName });
      try {
        // @ts-expect-error Response is an array type
        const [dynamicSearchAdsSearchTermView] = await service.getDynamicSearchAdsSearchTermView(getRequest, {
          // @ts-expect-error This arg doesn't exist in the type definitions
          otherArgs: {
            headers: this.callHeaders,
          },
        });
        return dynamicSearchAdsSearchTermView;
      } catch (err) {
        // Convert the raw failure into a structured Google Ads error.
        throw this.getGoogleAdsError(err);
      }
    }
  }
}
/**
* Accessor for the ExpandedLandingPageViewService; loads the gRPC client lazily on each access.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/ExpandedLandingPageViewService
*/
public get expandedLandingPageViews() {
const service = this.loadService<services.ExpandedLandingPageViewService>("ExpandedLandingPageViewServiceClient")
return {
/**
* @description Retrieve a resources.ExpandedLandingPageView in full detail
* @warning Don't use get in production!
* @returns resources.ExpandedLandingPageView
*/
get: async (resourceName: string): Promise<resources.ExpandedLandingPageView> => {
const request = new services.GetExpandedLandingPageViewRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getExpandedLandingPageView(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* Accessor for the FeedPlaceholderViewService; loads the gRPC client lazily on each access.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/FeedPlaceholderViewService
*/
public get feedPlaceholderViews() {
const service = this.loadService<services.FeedPlaceholderViewService>("FeedPlaceholderViewServiceClient")
return {
/**
* @description Retrieve a resources.FeedPlaceholderView in full detail
* @warning Don't use get in production!
* @returns resources.FeedPlaceholderView
*/
get: async (resourceName: string): Promise<resources.FeedPlaceholderView> => {
const request = new services.GetFeedPlaceholderViewRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getFeedPlaceholderView(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* Accessor for the GenderViewService; loads the gRPC client lazily on each access.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/GenderViewService
*/
public get genderViews() {
const service = this.loadService<services.GenderViewService>("GenderViewServiceClient")
return {
/**
* @description Retrieve a resources.GenderView in full detail
* @warning Don't use get in production!
* @returns resources.GenderView
*/
get: async (resourceName: string): Promise<resources.GenderView> => {
const request = new services.GetGenderViewRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getGenderView(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* Accessor for the GeoTargetConstantService; loads the gRPC client lazily on each access.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/GeoTargetConstantService
*/
public get geoTargetConstants() {
const service = this.loadService<services.GeoTargetConstantService>("GeoTargetConstantServiceClient")
return {
/**
* @description Retrieve a resources.GeoTargetConstant in full detail
* @warning Don't use get in production!
* @returns resources.GeoTargetConstant
*/
get: async (resourceName: string): Promise<resources.GeoTargetConstant> => {
const request = new services.GetGeoTargetConstantRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getGeoTargetConstant(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description Suggest geo target constants from location names/resource names.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/GeoTargetConstantService#suggestgeotargetconstants
*/
suggestGeoTargetConstants: async (request: services.SuggestGeoTargetConstantsRequest): Promise<services.SuggestGeoTargetConstantsResponse> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.suggestGeoTargetConstants(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* Accessor for the GeographicViewService; loads the gRPC client lazily on each access.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/GeographicViewService
*/
public get geographicViews() {
const service = this.loadService<services.GeographicViewService>("GeographicViewServiceClient")
return {
/**
* @description Retrieve a resources.GeographicView in full detail
* @warning Don't use get in production!
* @returns resources.GeographicView
*/
get: async (resourceName: string): Promise<resources.GeographicView> => {
const request = new services.GetGeographicViewRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getGeographicView(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* Accessor for the GroupPlacementViewService; loads the gRPC client lazily on each access.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/GroupPlacementViewService
*/
public get groupPlacementViews() {
const service = this.loadService<services.GroupPlacementViewService>("GroupPlacementViewServiceClient")
return {
/**
* @description Retrieve a resources.GroupPlacementView in full detail
* @warning Don't use get in production!
* @returns resources.GroupPlacementView
*/
get: async (resourceName: string): Promise<resources.GroupPlacementView> => {
const request = new services.GetGroupPlacementViewRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getGroupPlacementView(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* Accessor for the HotelGroupViewService; loads the gRPC client lazily on each access.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/HotelGroupViewService
*/
public get hotelGroupViews() {
const service = this.loadService<services.HotelGroupViewService>("HotelGroupViewServiceClient")
return {
/**
* @description Retrieve a resources.HotelGroupView in full detail
* @warning Don't use get in production!
* @returns resources.HotelGroupView
*/
get: async (resourceName: string): Promise<resources.HotelGroupView> => {
const request = new services.GetHotelGroupViewRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getHotelGroupView(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* Accessor for the HotelPerformanceViewService; loads the gRPC client lazily on each access.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/HotelPerformanceViewService
*/
public get hotelPerformanceViews() {
const service = this.loadService<services.HotelPerformanceViewService>("HotelPerformanceViewServiceClient")
return {
/**
* @description Retrieve a resources.HotelPerformanceView in full detail
* @warning Don't use get in production!
* @returns resources.HotelPerformanceView
*/
get: async (resourceName: string): Promise<resources.HotelPerformanceView> => {
const request = new services.GetHotelPerformanceViewRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getHotelPerformanceView(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* Accessor for the IncomeRangeViewService; loads the gRPC client lazily on each access.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/IncomeRangeViewService
*/
public get incomeRangeViews() {
const service = this.loadService<services.IncomeRangeViewService>("IncomeRangeViewServiceClient")
return {
/**
* @description Retrieve a resources.IncomeRangeView in full detail
* @warning Don't use get in production!
* @returns resources.IncomeRangeView
*/
get: async (resourceName: string): Promise<resources.IncomeRangeView> => {
const request = new services.GetIncomeRangeViewRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getIncomeRangeView(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* Accessor for the InvoiceService; loads the gRPC client lazily on each access.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/InvoiceService
*/
public get invoices() {
const service = this.loadService<services.InvoiceService>("InvoiceServiceClient")
return {
/**
* @description List invoices for a billing setup and month.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/InvoiceService#listinvoices
*/
listInvoices: async (request: services.ListInvoicesRequest): Promise<services.ListInvoicesResponse> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.listInvoices(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* Accessor for the KeywordPlanIdeaService; loads the gRPC client lazily on each access.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/KeywordPlanIdeaService
*/
public get keywordPlanIdeas() {
const service = this.loadService<services.KeywordPlanIdeaService>("KeywordPlanIdeaServiceClient")
return {
/**
* @description Generate keyword ideas. NOTE: the response type name is singular
* ("GenerateKeywordIdeaResponse") — this matches the proto definition.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/KeywordPlanIdeaService#generatekeywordideas
*/
generateKeywordIdeas: async (request: services.GenerateKeywordIdeasRequest): Promise<services.GenerateKeywordIdeaResponse> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.generateKeywordIdeas(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* Accessor for the KeywordThemeConstantService; loads the gRPC client lazily on each access.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/KeywordThemeConstantService
*/
public get keywordThemeConstants() {
const service = this.loadService<services.KeywordThemeConstantService>("KeywordThemeConstantServiceClient")
return {
/**
* @description Retrieve a resources.KeywordThemeConstant in full detail
* @warning Don't use get in production!
* @returns resources.KeywordThemeConstant
*/
get: async (resourceName: string): Promise<resources.KeywordThemeConstant> => {
const request = new services.GetKeywordThemeConstantRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getKeywordThemeConstant(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description Suggest Smart Campaign keyword theme constants for query text.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/KeywordThemeConstantService#suggestkeywordthemeconstants
*/
suggestKeywordThemeConstants: async (request: services.SuggestKeywordThemeConstantsRequest): Promise<services.SuggestKeywordThemeConstantsResponse> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.suggestKeywordThemeConstants(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* Accessor for the KeywordViewService; loads the gRPC client lazily on each access.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/KeywordViewService
*/
public get keywordViews() {
const service = this.loadService<services.KeywordViewService>("KeywordViewServiceClient")
return {
/**
* @description Retrieve a resources.KeywordView in full detail
* @warning Don't use get in production!
* @returns resources.KeywordView
*/
get: async (resourceName: string): Promise<resources.KeywordView> => {
const request = new services.GetKeywordViewRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getKeywordView(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* Accessor for the LandingPageViewService; loads the gRPC client lazily on each access.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/LandingPageViewService
*/
public get landingPageViews() {
const service = this.loadService<services.LandingPageViewService>("LandingPageViewServiceClient")
return {
/**
* @description Retrieve a resources.LandingPageView in full detail
* @warning Don't use get in production!
* @returns resources.LandingPageView
*/
get: async (resourceName: string): Promise<resources.LandingPageView> => {
const request = new services.GetLandingPageViewRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getLandingPageView(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* Accessor for the LanguageConstantService; loads the gRPC client lazily on each access.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/LanguageConstantService
*/
public get languageConstants() {
const service = this.loadService<services.LanguageConstantService>("LanguageConstantServiceClient")
return {
/**
* @description Retrieve a resources.LanguageConstant in full detail
* @warning Don't use get in production!
* @returns resources.LanguageConstant
*/
get: async (resourceName: string): Promise<resources.LanguageConstant> => {
const request = new services.GetLanguageConstantRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getLanguageConstant(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* Accessor for the LifeEventService; loads the gRPC client lazily on each access.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/LifeEventService
*/
public get lifeEvents() {
const service = this.loadService<services.LifeEventService>("LifeEventServiceClient")
return {
/**
* @description Retrieve a resources.LifeEvent in full detail
* @warning Don't use get in production!
* @returns resources.LifeEvent
*/
get: async (resourceName: string): Promise<resources.LifeEvent> => {
const request = new services.GetLifeEventRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getLifeEvent(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* Accessor for the LocationViewService; loads the gRPC client lazily on each access.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/LocationViewService
*/
public get locationViews() {
const service = this.loadService<services.LocationViewService>("LocationViewServiceClient")
return {
/**
* @description Retrieve a resources.LocationView in full detail
* @warning Don't use get in production!
* @returns resources.LocationView
*/
get: async (resourceName: string): Promise<resources.LocationView> => {
const request = new services.GetLocationViewRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getLocationView(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* Accessor for the ManagedPlacementViewService; loads the gRPC client lazily on each access.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/ManagedPlacementViewService
*/
public get managedPlacementViews() {
const service = this.loadService<services.ManagedPlacementViewService>("ManagedPlacementViewServiceClient")
return {
/**
* @description Retrieve a resources.ManagedPlacementView in full detail
* @warning Don't use get in production!
* @returns resources.ManagedPlacementView
*/
get: async (resourceName: string): Promise<resources.ManagedPlacementView> => {
const request = new services.GetManagedPlacementViewRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getManagedPlacementView(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* Accessor for the MerchantCenterLinkService; loads the gRPC client lazily on each
* access. Mutations (update/remove) run through the client's mutation hooks
* (onMutationStart / onMutationEnd / onMutationError).
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/MerchantCenterLinkService
*/
public get merchantCenterLinks() {
const service = this.loadService<services.MerchantCenterLinkService>("MerchantCenterLinkServiceClient")
// Only "validate_only" is supported as a mutate option for this service.
type MutateOptions = Partial<Pick<services.IMutateMerchantCenterLinkRequest, "validate_only">>
return {
/**
* @description List Merchant Center links available to the customer.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/MerchantCenterLinkService#listmerchantcenterlinks
*/
listMerchantCenterLinks: async (request: services.ListMerchantCenterLinksRequest): Promise<services.ListMerchantCenterLinksResponse> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.listMerchantCenterLinks(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description Retrieve a resources.MerchantCenterLink in full detail
* @warning Don't use get in production!
* @returns resources.MerchantCenterLink
*/
get: async (resourceName: string): Promise<resources.MerchantCenterLink> => {
const request = new services.GetMerchantCenterLinkRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getMerchantCenterLink(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description update resources of type resources.IMerchantCenterLink
* @returns services.MutateMerchantCenterLinkResponse
*/
update: async (
merchantCenterLinks: (resources.IMerchantCenterLink | resources.MerchantCenterLink)[] ,
options?: MutateOptions
): Promise<services.MutateMerchantCenterLinkResponse > => {
// Build "update" operations from the given links.
const ops = this.buildOperations<
services.MerchantCenterLinkOperation,
resources.IMerchantCenterLink
>(
"update",
merchantCenterLinks
// @ts-expect-error Static class type here is fine
, resources.MerchantCenterLink
);
const request = this.buildRequest<
services.MerchantCenterLinkOperation,
services.IMutateMerchantCenterLinkRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "MerchantCenterLinkService.mutateMerchantCenterLink",
mutation: request,
isServiceCall: true,
};
// Let the onMutationStart hook cancel the mutation or edit request options
// before anything is sent to the API.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateMerchantCenterLink(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Let the onMutationEnd hook replace the resolved value.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response,
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return response;
} catch (err) {
// Notify the error hook, then rethrow the normalised error.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
,
/**
* @description remove resources of type string
* @returns services.MutateMerchantCenterLinkResponse
*/
remove: async (
merchantCenterLinks: string[] ,
options?: MutateOptions
): Promise<services.MutateMerchantCenterLinkResponse > => {
// Build "remove" operations from the given resource names.
const ops = this.buildOperations<
services.MerchantCenterLinkOperation,
string
>(
"remove",
merchantCenterLinks
);
const request = this.buildRequest<
services.MerchantCenterLinkOperation,
services.IMutateMerchantCenterLinkRequest,
MutateOptions
>(ops, options);
const baseHookArguments: BaseMutationHookArgs = {
credentials: this.credentials,
method: "MerchantCenterLinkService.mutateMerchantCenterLink",
mutation: request,
isServiceCall: true,
};
// Let the onMutationStart hook cancel the mutation or edit request options
// before anything is sent to the API.
if (this.hooks.onMutationStart) {
const mutationCancellation: HookedCancellation = { cancelled: false };
await this.hooks.onMutationStart({
...baseHookArguments,
cancel: (res) => {
mutationCancellation.cancelled = true;
mutationCancellation.res = res;
},
editOptions: (options) => {
Object.entries(options).forEach(([key, val]) => {
// @ts-expect-error Index with key type is fine
request[key] = val;
});
},
});
if (mutationCancellation.cancelled) {
return mutationCancellation.res;
}
}
try {
// @ts-expect-error Response is an array type
const [response] = await service.mutateMerchantCenterLink(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
// Let the onMutationEnd hook replace the resolved value.
if (this.hooks.onMutationEnd) {
const mutationResolution: HookedResolution = { resolved: false };
await this.hooks.onMutationEnd({
...baseHookArguments,
response,
resolve: (res) => {
mutationResolution.resolved = true;
mutationResolution.res = res;
},
});
if (mutationResolution.resolved) {
return mutationResolution.res;
}
}
return response;
} catch (err) {
// Notify the error hook, then rethrow the normalised error.
const googleAdsError = this.getGoogleAdsError(err);
if (this.hooks.onMutationError) {
await this.hooks.onMutationError({
...baseHookArguments,
error: googleAdsError,
});
}
throw googleAdsError;
}
}
}
}
/**
* Accessor for the MobileAppCategoryConstantService; loads the gRPC client lazily on each access.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/MobileAppCategoryConstantService
*/
public get mobileAppCategoryConstants() {
const service = this.loadService<services.MobileAppCategoryConstantService>("MobileAppCategoryConstantServiceClient")
return {
/**
* @description Retrieve a resources.MobileAppCategoryConstant in full detail
* @warning Don't use get in production!
* @returns resources.MobileAppCategoryConstant
*/
get: async (resourceName: string): Promise<resources.MobileAppCategoryConstant> => {
const request = new services.GetMobileAppCategoryConstantRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getMobileAppCategoryConstant(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* Accessor for the MobileDeviceConstantService; loads the gRPC client lazily on each access.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/MobileDeviceConstantService
*/
public get mobileDeviceConstants() {
const service = this.loadService<services.MobileDeviceConstantService>("MobileDeviceConstantServiceClient")
return {
/**
* @description Retrieve a resources.MobileDeviceConstant in full detail
* @warning Don't use get in production!
* @returns resources.MobileDeviceConstant
*/
get: async (resourceName: string): Promise<resources.MobileDeviceConstant> => {
const request = new services.GetMobileDeviceConstantRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getMobileDeviceConstant(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* Accessor for the OfflineUserDataJobService; loads the gRPC client lazily on each
* access. Typical flow: createOfflineUserDataJob → addOfflineUserDataJobOperations
* → runOfflineUserDataJob (which returns a long-running operation).
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/OfflineUserDataJobService
*/
public get offlineUserDataJobs() {
const service = this.loadService<services.OfflineUserDataJobService>("OfflineUserDataJobServiceClient")
return {
/**
* @description Create a new offline user data job.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/OfflineUserDataJobService#createofflineuserdatajob
*/
createOfflineUserDataJob: async (request: services.CreateOfflineUserDataJobRequest): Promise<services.CreateOfflineUserDataJobResponse> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.createOfflineUserDataJob(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description Retrieve a resources.OfflineUserDataJob in full detail
* @warning Don't use get in production!
* @returns resources.OfflineUserDataJob
*/
get: async (resourceName: string): Promise<resources.OfflineUserDataJob> => {
const request = new services.GetOfflineUserDataJobRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getOfflineUserDataJob(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description Add operations to a previously created offline user data job.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/OfflineUserDataJobService#addofflineuserdatajoboperations
*/
addOfflineUserDataJobOperations: async (request: services.AddOfflineUserDataJobOperationsRequest): Promise<services.AddOfflineUserDataJobOperationsResponse> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.addOfflineUserDataJobOperations(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
,
/**
* @description Run the job; returns a google.longrunning.Operation to poll.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/OfflineUserDataJobService#runofflineuserdatajob
*/
runOfflineUserDataJob: async (request: services.RunOfflineUserDataJobRequest): Promise<longrunning.Operation> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.runOfflineUserDataJob(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* Accessor for the OperatingSystemVersionConstantService; loads the gRPC client lazily on each access.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/OperatingSystemVersionConstantService
*/
public get operatingSystemVersionConstants() {
const service = this.loadService<services.OperatingSystemVersionConstantService>("OperatingSystemVersionConstantServiceClient")
return {
/**
* @description Retrieve a resources.OperatingSystemVersionConstant in full detail
* @warning Don't use get in production!
* @returns resources.OperatingSystemVersionConstant
*/
get: async (resourceName: string): Promise<resources.OperatingSystemVersionConstant> => {
const request = new services.GetOperatingSystemVersionConstantRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getOperatingSystemVersionConstant(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* Accessor for the PaidOrganicSearchTermViewService; loads the gRPC client lazily on each access.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/PaidOrganicSearchTermViewService
*/
public get paidOrganicSearchTermViews() {
const service = this.loadService<services.PaidOrganicSearchTermViewService>("PaidOrganicSearchTermViewServiceClient")
return {
/**
* @description Retrieve a resources.PaidOrganicSearchTermView in full detail
* @warning Don't use get in production!
* @returns resources.PaidOrganicSearchTermView
*/
get: async (resourceName: string): Promise<resources.PaidOrganicSearchTermView> => {
const request = new services.GetPaidOrganicSearchTermViewRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getPaidOrganicSearchTermView(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* Accessor for the ParentalStatusViewService; loads the gRPC client lazily on each access.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/ParentalStatusViewService
*/
public get parentalStatusViews() {
const service = this.loadService<services.ParentalStatusViewService>("ParentalStatusViewServiceClient")
return {
/**
* @description Retrieve a resources.ParentalStatusView in full detail
* @warning Don't use get in production!
* @returns resources.ParentalStatusView
*/
get: async (resourceName: string): Promise<resources.ParentalStatusView> => {
const request = new services.GetParentalStatusViewRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getParentalStatusView(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* Accessor for the PaymentsAccountService; loads the gRPC client lazily on each access.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/PaymentsAccountService
*/
public get paymentsAccounts() {
const service = this.loadService<services.PaymentsAccountService>("PaymentsAccountServiceClient")
return {
/**
* @description List payments accounts accessible to the customer.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/PaymentsAccountService#listpaymentsaccounts
*/
listPaymentsAccounts: async (request: services.ListPaymentsAccountsRequest): Promise<services.ListPaymentsAccountsResponse> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.listPaymentsAccounts(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* Accessor for the ProductBiddingCategoryConstantService; loads the gRPC client lazily on each access.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/ProductBiddingCategoryConstantService
*/
public get productBiddingCategoryConstants() {
const service = this.loadService<services.ProductBiddingCategoryConstantService>("ProductBiddingCategoryConstantServiceClient")
return {
/**
* @description Retrieve a resources.ProductBiddingCategoryConstant in full detail
* @warning Don't use get in production!
* @returns resources.ProductBiddingCategoryConstant
*/
get: async (resourceName: string): Promise<resources.ProductBiddingCategoryConstant> => {
const request = new services.GetProductBiddingCategoryConstantRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getProductBiddingCategoryConstant(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* Accessor for the ProductGroupViewService; loads the gRPC client lazily on each access.
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/ProductGroupViewService
*/
public get productGroupViews() {
const service = this.loadService<services.ProductGroupViewService>("ProductGroupViewServiceClient")
return {
/**
* @description Retrieve a resources.ProductGroupView in full detail
* @warning Don't use get in production!
* @returns resources.ProductGroupView
*/
get: async (resourceName: string): Promise<resources.ProductGroupView> => {
const request = new services.GetProductGroupViewRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getProductGroupView(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
// Normalise gRPC errors into GoogleAdsError before rethrowing.
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/ReachPlanService
*/
public get reachPlans() {
// Obtains the generated gapic client; each call unwraps the [response] tuple and rethrows failures via this.getGoogleAdsError.
const service = this.loadService<services.ReachPlanService>("ReachPlanServiceClient")
return {
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/ReachPlanService#listplannablelocations
*/
listPlannableLocations: async (request: services.ListPlannableLocationsRequest): Promise<services.ListPlannableLocationsResponse> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.listPlannableLocations(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/ReachPlanService#listplannableproducts
*/
listPlannableProducts: async (request: services.ListPlannableProductsRequest): Promise<services.ListPlannableProductsResponse> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.listPlannableProducts(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/ReachPlanService#generateproductmixideas
*/
generateProductMixIdeas: async (request: services.GenerateProductMixIdeasRequest): Promise<services.GenerateProductMixIdeasResponse> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.generateProductMixIdeas(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/ReachPlanService#generatereachforecast
*/
generateReachForecast: async (request: services.GenerateReachForecastRequest): Promise<services.GenerateReachForecastResponse> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.generateReachForecast(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/RecommendationService
*/
public get recommendations() {
// Obtains the generated gapic client; each call unwraps the [response] tuple and rethrows failures via this.getGoogleAdsError.
const service = this.loadService<services.RecommendationService>("RecommendationServiceClient")
return {
/**
* @description Retrieve a resources.Recommendation in full detail
* @warning Don't use get in production!
* @returns resources.Recommendation
*/
get: async (resourceName: string): Promise<resources.Recommendation> => {
const request = new services.GetRecommendationRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getRecommendation(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/RecommendationService#applyrecommendation
*/
applyRecommendation: async (request: services.ApplyRecommendationRequest): Promise<services.ApplyRecommendationResponse> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.applyRecommendation(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/RecommendationService#dismissrecommendation
*/
dismissRecommendation: async (request: services.DismissRecommendationRequest): Promise<services.DismissRecommendationResponse> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.dismissRecommendation(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/SearchTermViewService
*/
public get searchTermViews() {
// Obtains the generated gapic client; each call unwraps the [response] tuple and rethrows failures via this.getGoogleAdsError.
const service = this.loadService<services.SearchTermViewService>("SearchTermViewServiceClient")
return {
/**
* @description Retrieve a resources.SearchTermView in full detail
* @warning Don't use get in production!
* @returns resources.SearchTermView
*/
get: async (resourceName: string): Promise<resources.SearchTermView> => {
const request = new services.GetSearchTermViewRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getSearchTermView(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/ShoppingPerformanceViewService
*/
public get shoppingPerformanceViews() {
// Obtains the generated gapic client; each call unwraps the [response] tuple and rethrows failures via this.getGoogleAdsError.
const service = this.loadService<services.ShoppingPerformanceViewService>("ShoppingPerformanceViewServiceClient")
return {
/**
* @description Retrieve a resources.ShoppingPerformanceView in full detail
* @warning Don't use get in production!
* @returns resources.ShoppingPerformanceView
*/
get: async (resourceName: string): Promise<resources.ShoppingPerformanceView> => {
const request = new services.GetShoppingPerformanceViewRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getShoppingPerformanceView(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/SmartCampaignSearchTermViewService
*/
public get smartCampaignSearchTermViews() {
// Obtains the generated gapic client; each call unwraps the [response] tuple and rethrows failures via this.getGoogleAdsError.
const service = this.loadService<services.SmartCampaignSearchTermViewService>("SmartCampaignSearchTermViewServiceClient")
return {
/**
* @description Retrieve a resources.SmartCampaignSearchTermView in full detail
* @warning Don't use get in production!
* @returns resources.SmartCampaignSearchTermView
*/
get: async (resourceName: string): Promise<resources.SmartCampaignSearchTermView> => {
const request = new services.GetSmartCampaignSearchTermViewRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getSmartCampaignSearchTermView(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/SmartCampaignSuggestService
*/
public get smartCampaignSuggests() {
// Obtains the generated gapic client; each call unwraps the [response] tuple and rethrows failures via this.getGoogleAdsError.
const service = this.loadService<services.SmartCampaignSuggestService>("SmartCampaignSuggestServiceClient")
return {
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/SmartCampaignSuggestService#suggestsmartcampaignbudgetoptions
*/
suggestSmartCampaignBudgetOptions: async (request: services.SuggestSmartCampaignBudgetOptionsRequest): Promise<services.SuggestSmartCampaignBudgetOptionsResponse> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.suggestSmartCampaignBudgetOptions(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/SmartCampaignSuggestService#suggestsmartcampaignad
*/
suggestSmartCampaignAd: async (request: services.SuggestSmartCampaignAdRequest): Promise<services.SuggestSmartCampaignAdResponse> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.suggestSmartCampaignAd(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/SmartCampaignSuggestService#suggestkeywordthemes
*/
suggestKeywordThemes: async (request: services.SuggestKeywordThemesRequest): Promise<services.SuggestKeywordThemesResponse> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.suggestKeywordThemes(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/ThirdPartyAppAnalyticsLinkService
*/
public get thirdPartyAppAnalyticsLinks() {
// Obtains the generated gapic client; each call unwraps the [response] tuple and rethrows failures via this.getGoogleAdsError.
const service = this.loadService<services.ThirdPartyAppAnalyticsLinkService>("ThirdPartyAppAnalyticsLinkServiceClient")
return {
/**
* @description Retrieve a resources.ThirdPartyAppAnalyticsLink in full detail
* @warning Don't use get in production!
* @returns resources.ThirdPartyAppAnalyticsLink
*/
get: async (resourceName: string): Promise<resources.ThirdPartyAppAnalyticsLink> => {
const request = new services.GetThirdPartyAppAnalyticsLinkRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getThirdPartyAppAnalyticsLink(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
,
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/ThirdPartyAppAnalyticsLinkService#regenerateshareablelinkid
*/
regenerateShareableLinkId: async (request: services.RegenerateShareableLinkIdRequest): Promise<services.RegenerateShareableLinkIdResponse> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.regenerateShareableLinkId(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/TopicConstantService
*/
public get topicConstants() {
// Obtains the generated gapic client; each call unwraps the [response] tuple and rethrows failures via this.getGoogleAdsError.
const service = this.loadService<services.TopicConstantService>("TopicConstantServiceClient")
return {
/**
* @description Retrieve a resources.TopicConstant in full detail
* @warning Don't use get in production!
* @returns resources.TopicConstant
*/
get: async (resourceName: string): Promise<resources.TopicConstant> => {
const request = new services.GetTopicConstantRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getTopicConstant(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/TopicViewService
*/
public get topicViews() {
// Obtains the generated gapic client; each call unwraps the [response] tuple and rethrows failures via this.getGoogleAdsError.
const service = this.loadService<services.TopicViewService>("TopicViewServiceClient")
return {
/**
* @description Retrieve a resources.TopicView in full detail
* @warning Don't use get in production!
* @returns resources.TopicView
*/
get: async (resourceName: string): Promise<resources.TopicView> => {
const request = new services.GetTopicViewRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getTopicView(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/UserDataService
*/
public get userData() {
// Obtains the generated gapic client; each call unwraps the [response] tuple and rethrows failures via this.getGoogleAdsError.
const service = this.loadService<services.UserDataService>("UserDataServiceClient")
return {
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/UserDataService#uploaduserdata
*/
uploadUserData: async (request: services.UploadUserDataRequest): Promise<services.UploadUserDataResponse> => {
try {
// @ts-expect-error Response is an array type
const [response] = await service.uploadUserData(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/UserInterestService
*/
public get userInterests() {
// Obtains the generated gapic client; each call unwraps the [response] tuple and rethrows failures via this.getGoogleAdsError.
const service = this.loadService<services.UserInterestService>("UserInterestServiceClient")
return {
/**
* @description Retrieve a resources.UserInterest in full detail
* @warning Don't use get in production!
* @returns resources.UserInterest
*/
get: async (resourceName: string): Promise<resources.UserInterest> => {
const request = new services.GetUserInterestRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getUserInterest(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/UserLocationViewService
*/
public get userLocationViews() {
// Obtains the generated gapic client; each call unwraps the [response] tuple and rethrows failures via this.getGoogleAdsError.
const service = this.loadService<services.UserLocationViewService>("UserLocationViewServiceClient")
return {
/**
* @description Retrieve a resources.UserLocationView in full detail
* @warning Don't use get in production!
* @returns resources.UserLocationView
*/
get: async (resourceName: string): Promise<resources.UserLocationView> => {
const request = new services.GetUserLocationViewRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getUserLocationView(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/VideoService
*/
public get videos() {
// Obtains the generated gapic client; each call unwraps the [response] tuple and rethrows failures via this.getGoogleAdsError.
const service = this.loadService<services.VideoService>("VideoServiceClient")
return {
/**
* @description Retrieve a resources.Video in full detail
* @warning Don't use get in production!
* @returns resources.Video
*/
get: async (resourceName: string): Promise<resources.Video> => {
const request = new services.GetVideoRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getVideo(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
/**
* @link https://developers.google.com/google-ads/api/reference/rpc/v9/WebpageViewService
*/
public get webpageViews() {
// Obtains the generated gapic client; each call unwraps the [response] tuple and rethrows failures via this.getGoogleAdsError.
const service = this.loadService<services.WebpageViewService>("WebpageViewServiceClient")
return {
/**
* @description Retrieve a resources.WebpageView in full detail
* @warning Don't use get in production!
* @returns resources.WebpageView
*/
get: async (resourceName: string): Promise<resources.WebpageView> => {
const request = new services.GetWebpageViewRequest({
resource_name: resourceName,
});
try {
// @ts-expect-error Response is an array type
const [response] = await service.getWebpageView(request, {
// @ts-expect-error This arg doesn't exist in the type definitions
otherArgs: {
headers: this.callHeaders,
},
});
return response;
} catch (err) {
throw this.getGoogleAdsError(err);
}
}
}
}
} | the_stack |
/**
* Tree data structure
*/
import * as zrUtil from 'zrender/src/core/util';
import Model from '../model/Model';
import linkSeriesData from './helper/linkSeriesData';
import SeriesData from './SeriesData';
import prepareSeriesDataSchema from './helper/createDimensions';
import {
DimensionLoose, ParsedValue, OptionDataValue,
OptionDataItemObject
} from '../util/types';
import { Dictionary } from 'zrender/src/core/types';
import { convertOptionIdName } from '../util/model';
// Traversal order used by TreeNode#eachNode / Tree#eachNode.
type TreeTraverseOrder = 'preorder' | 'postorder';
// Visitor callback; returning false from a preorder visit skips the subtree.
type TreeTraverseCallback<Ctx> = (this: Ctx, node: TreeNode) => boolean | void;
type TreeTraverseOption = {
order?: TreeTraverseOrder
// Which child list to walk: all children or only viewChildren.
attr?: 'children' | 'viewChildren'
};
// Raw option-node shape accepted by Tree.createTree (name/value plus nested children).
interface TreeNodeOption extends Pick<OptionDataItemObject<OptionDataValue>, 'name' | 'value'> {
children?: TreeNodeOption[];
}
// A single node of a Tree; holds structural links and proxies data access
// (value/layout/visual/model) to the host tree's backing SeriesData.
export class TreeNode {
name: string;
// Depth of this node from the root (root is 0; set by updateDepthAndHeight).
depth: number = 0;
// Height of the subtree rooted here (a leaf is 1 after updateDepthAndHeight).
height: number = 0;
// Parent node; undefined for the root.
parentNode: TreeNode;
/**
* Reference to list item.
* Do not persist dataIndex outside,
* because it may be changed by list.
* If dataIndex is -1,
* this node is logically deleted (filtered) in list.
*/
dataIndex: number = -1;
children: TreeNode[] = [];
// NOTE(review): appears to be the filtered subset of children used for rendering — confirm.
viewChildren: TreeNode[] = [];
isExpand: boolean = false;
// The Tree this node belongs to; provides access to the backing SeriesData.
readonly hostTree: Tree<Model>;
constructor(name: string, hostTree: Tree<Model>) {
this.name = name || '';
this.hostTree = hostTree;
}
/**
* The node is removed.
*/
isRemoved(): boolean {
return this.dataIndex < 0;
}
/**
* Travel this subtree (include this node).
* Usage:
* node.eachNode(function () { ... }); // preorder
* node.eachNode('preorder', function () { ... }); // preorder
* node.eachNode('postorder', function () { ... }); // postorder
* node.eachNode(
* {order: 'postorder', attr: 'viewChildren'},
* function () { ... }
* ); // postorder
*
* @param options If string, means order.
* @param options.order 'preorder' or 'postorder'
* @param options.attr 'children' or 'viewChildren'
* @param cb If in preorder and return false,
* its subtree will not be visited.
*/
eachNode<Ctx>(options: TreeTraverseOrder, cb: TreeTraverseCallback<Ctx>, context?: Ctx): void
eachNode<Ctx>(options: TreeTraverseOption, cb: TreeTraverseCallback<Ctx>, context?: Ctx): void
eachNode<Ctx>(cb: TreeTraverseCallback<Ctx>, context?: Ctx): void
eachNode<Ctx>(
options: TreeTraverseOrder | TreeTraverseOption | TreeTraverseCallback<Ctx>,
cb?: TreeTraverseCallback<Ctx> | Ctx,
context?: Ctx
) {
// Normalize the overloaded signature: (cb, ctx) / (order, cb, ctx) / (opts, cb, ctx).
if (typeof options === 'function') {
context = cb as Ctx;
cb = options;
options = null;
}
options = options || {};
if (zrUtil.isString(options)) {
options = {order: options};
}
const order = (options as TreeTraverseOption).order || 'preorder';
const children = this[(options as TreeTraverseOption).attr || 'children'];
// In preorder, a callback returning false suppresses visiting the subtree.
let suppressVisitSub;
order === 'preorder' && (suppressVisitSub = (cb as TreeTraverseCallback<Ctx>).call(context as Ctx, this));
for (let i = 0; !suppressVisitSub && i < children.length; i++) {
children[i].eachNode(
options as TreeTraverseOption,
cb as TreeTraverseCallback<Ctx>,
context
);
}
order === 'postorder' && (cb as TreeTraverseCallback<Ctx>).call(context, this);
}
/**
* Update depth and height of this subtree.
*/
updateDepthAndHeight(depth: number) {
let height = 0;
this.depth = depth;
for (let i = 0; i < this.children.length; i++) {
const child = this.children[i];
child.updateDepthAndHeight(depth + 1);
if (child.height > height) {
height = child.height;
}
}
// Height is 1 + max child height, so a leaf ends up with height 1.
this.height = height + 1;
}
// Depth-first search by data id; returns undefined when no node matches.
getNodeById(id: string): TreeNode {
if (this.getId() === id) {
return this;
}
for (let i = 0, children = this.children, len = children.length; i < len; i++) {
const res = children[i].getNodeById(id);
if (res) {
return res;
}
}
}
// Whether `node` is this node or inside this subtree.
// Returns undefined (falsy) rather than false when not found.
contains(node: TreeNode): boolean {
if (node === this) {
return true;
}
for (let i = 0, children = this.children, len = children.length; i < len; i++) {
const res = children[i].contains(node);
if (res) {
return res;
}
}
}
/**
* @param includeSelf Default false.
* @return order: [root, child, grandchild, ...]
*/
getAncestors(includeSelf?: boolean): TreeNode[] {
const ancestors = [];
let node = includeSelf ? this : this.parentNode;
while (node) {
ancestors.push(node);
node = node.parentNode;
}
// Collected bottom-up; reverse to get root-first order.
ancestors.reverse();
return ancestors;
}
// dataIndex of each node on the root-to-this path (this node included).
getAncestorsIndices(): number[] {
const indices: number[] = [];
let currNode = this as TreeNode;
while (currNode) {
indices.push(currNode.dataIndex);
currNode = currNode.parentNode;
}
indices.reverse();
return indices;
}
// dataIndex of every node in this subtree (this node included), preorder.
getDescendantIndices(): number[] {
const indices: number[] = [];
this.eachNode(childNode => {
indices.push(childNode.dataIndex);
});
return indices;
}
// Read this node's value from the backing SeriesData ('value' dimension by default).
getValue(dimension?: DimensionLoose): ParsedValue {
const data = this.hostTree.data;
return data.getStore().get(data.getDimensionIndex(dimension || 'value'), this.dataIndex);
}
// No-op for removed nodes (dataIndex < 0).
setLayout(layout: any, merge?: boolean) {
this.dataIndex >= 0
&& this.hostTree.data.setItemLayout(this.dataIndex, layout, merge);
}
/**
* @return {Object} layout
*/
getLayout(): any {
return this.hostTree.data.getItemLayout(this.dataIndex);
}
getModel<T = unknown>(): Model<T>
// @depcrecated
// getModel<T = unknown, S extends keyof T = keyof T>(path: S): Model<T[S]>
// eslint-disable-next-line @typescript-eslint/no-unused-vars
getModel<T = unknown>(path?: string): Model {
// Removed nodes have no item model; returns undefined in that case.
if (this.dataIndex < 0) {
return;
}
const hostTree = this.hostTree;
const itemModel = hostTree.data.getItemModel(this.dataIndex);
return itemModel.getModel(path as any);
}
// TODO: TYPE More specific model
// Per-depth level model; undefined when no levels are configured for this depth.
getLevelModel(): Model {
return (this.hostTree.levelModels || [])[this.depth];
}
/**
* @example
* setItemVisual('color', color);
* setItemVisual({
* 'color': color
* });
*/
// TODO: TYPE
setVisual(key: string, value: any): void
setVisual(obj: Dictionary<any>): void
setVisual(key: string | Dictionary<any>, value?: any) {
this.dataIndex >= 0
&& this.hostTree.data.setItemVisual(this.dataIndex, key as any, value);
}
/**
* Get item visual
* FIXME: make return type better
*/
getVisual(key: string): unknown {
return this.hostTree.data.getItemVisual(this.dataIndex, key as any);
}
getRawIndex(): number {
return this.hostTree.data.getRawIndex(this.dataIndex);
}
getId(): string {
return this.hostTree.data.getId(this.dataIndex);
}
/**
* index in parent's children
*/
getChildIndex(): number {
if (this.parentNode) {
const children = this.parentNode.children;
for (let i = 0; i < children.length; ++i) {
if (children[i] === this) {
return i;
}
}
return -1;
}
return -1;
}
/**
* if this is an ancestor of another node
*
* @param node another node
* @return if is ancestor
*/
isAncestorOf(node: TreeNode): boolean {
let parent = node.parentNode;
while (parent) {
if (parent === this) {
return true;
}
parent = parent.parentNode;
}
return false;
}
/**
* if this is an descendant of another node
*
* @param node another node
* @return if is descendant
*/
isDescendantOf(node: TreeNode): boolean {
return node !== this && node.isAncestorOf(this);
}
};
// Tree structure linked to a SeriesData instance; nodes are created once by
// createTree and then kept in sync with the data via update().
class Tree<HostModel extends Model = Model, LevelOption = any> {
type: 'tree' = 'tree';
root: TreeNode;
// Backing SeriesData; attached by createTree via linkSeriesData.
data: SeriesData;
hostModel: HostModel;
levelModels: Model<LevelOption>[];
// All nodes in raw-data order; indexed by raw data index (see getNodeByDataIndex).
private _nodes: TreeNode[] = [];
constructor(hostModel: HostModel) {
this.hostModel = hostModel;
}
/**
* Travel this subtree (include this node).
* Usage:
* node.eachNode(function () { ... }); // preorder
* node.eachNode('preorder', function () { ... }); // preorder
* node.eachNode('postorder', function () { ... }); // postorder
* node.eachNode(
* {order: 'postorder', attr: 'viewChildren'},
* function () { ... }
* ); // postorder
*
* @param options If string, means order.
* @param options.order 'preorder' or 'postorder'
* @param options.attr 'children' or 'viewChildren'
* @param cb
* @param context
*/
eachNode<Ctx>(options: TreeTraverseOrder, cb: TreeTraverseCallback<Ctx>, context?: Ctx): void
eachNode<Ctx>(options: TreeTraverseOption, cb: TreeTraverseCallback<Ctx>, context?: Ctx): void
eachNode<Ctx>(cb: TreeTraverseCallback<Ctx>, context?: Ctx): void
eachNode<Ctx>(
options: TreeTraverseOrder | TreeTraverseOption | TreeTraverseCallback<Ctx>,
cb?: TreeTraverseCallback<Ctx> | Ctx,
context?: Ctx
) {
// Delegates to the root node; overload normalization happens there.
this.root.eachNode(options as TreeTraverseOption, cb as TreeTraverseCallback<Ctx>, context);
}
// Map a (possibly filtered/remapped) data index back to its node via the raw index.
getNodeByDataIndex(dataIndex: number): TreeNode {
const rawIndex = this.data.getRawIndex(dataIndex);
return this._nodes[rawIndex];
}
// NOTE(review): parameter is named `name` but it is matched against node ids
// (TreeNode#getNodeById compares getId()) — confirm intended semantics.
getNodeById(name: string): TreeNode {
return this.root.getNodeById(name);
}
/**
* Update item available by list,
* when list has been performed options like 'filterSelf' or 'map'.
*/
update() {
const data = this.data;
const nodes = this._nodes;
// Mark every node removed, then re-attach the indices that survived filtering.
for (let i = 0, len = nodes.length; i < len; i++) {
nodes[i].dataIndex = -1;
}
for (let i = 0, len = data.count(); i < len; i++) {
nodes[data.getRawIndex(i)].dataIndex = i;
}
}
/**
* Clear all layouts
*/
clearLayouts() {
this.data.clearItemLayouts();
}
/**
* data node format:
* {
* name: ...
* value: ...
* children: [
* {
* name: ...
* value: ...
* children: ...
* },
* ...
* ]
* }
*/
static createTree<T extends TreeNodeOption, HostModel extends Model>(
dataRoot: T,
hostModel: HostModel,
beforeLink?: (data: SeriesData) => void
) {
const tree = new Tree(hostModel);
// Flattened option nodes in preorder; becomes the SeriesData source.
const listData: TreeNodeOption[] = [];
// Widest `value` array seen, used to size the value dimensions.
let dimMax = 1;
buildHierarchy(dataRoot);
function buildHierarchy(dataNode: TreeNodeOption, parentNode?: TreeNode) {
const value = dataNode.value;
dimMax = Math.max(dimMax, zrUtil.isArray(value) ? value.length : 1);
listData.push(dataNode);
const node = new TreeNode(convertOptionIdName(dataNode.name, ''), tree);
parentNode
? addChild(node, parentNode)
: (tree.root = node);
tree._nodes.push(node);
const children = dataNode.children;
if (children) {
for (let i = 0; i < children.length; i++) {
buildHierarchy(children[i], node);
}
}
}
tree.root.updateDepthAndHeight(0);
const { dimensions } = prepareSeriesDataSchema(listData, {
coordDimensions: ['value'],
dimensionsCount: dimMax
});
const list = new SeriesData(dimensions, hostModel);
list.initData(listData);
beforeLink && beforeLink(list);
// Cross-link list and tree so data operations propagate (see linkSeriesData).
linkSeriesData({
mainData: list,
struct: tree,
structAttr: 'tree'
});
tree.update();
return tree;
}
}
/**
* It is needed to consider the mess of 'list', 'hostModel' when creating a TreeNode,
* so this function is not ready and not necessary to be public.
*/
// Attach `child` under `node`. Attaching a node to its current parent is a no-op;
// a node coming from a different parent is NOT detached from that parent's list.
function addChild(child: TreeNode, node: TreeNode) {
    if (child.parentNode === node) {
        return;
    }
    node.children.push(child);
    child.parentNode = node;
}
export default Tree; | the_stack |
import {ENGINE} from '../engine';
import {customGrad} from '../gradients';
import {Tensor} from '../tensor';
import {convertToTensor} from '../tensor_util_env';
import {TensorLike} from '../types';
import * as util from '../util';
import * as axis_util from './axis_util';
import {op} from './operation';
import {ones, scalar, zerosLike} from './tensor_ops';
/**
 * Computes the log(sum(exp(elements across the reduction dimensions)).
 *
 * Reduces the input along the dimensions given in `axis`. Unless `keepDims`
 * is true, the rank of the array is reduced by 1 for each entry in `axis`.
 * If `keepDims` is true, the reduced dimensions are retained with length 1.
 * If `axis` has no entries, all dimensions are reduced, and an array with a
 * single element is returned.
 *
 * ```js
 * const x = tf.tensor1d([1, 2, 3]);
 *
 * x.logSumExp().print(); // or tf.logSumExp(x)
 * ```
 *
 * ```js
 * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);
 *
 * const axis = 1;
 * x.logSumExp(axis).print(); // or tf.logSumExp(a, axis)
 * ```
 * @param x The input tensor.
 * @param axis The dimension(s) to reduce. If null (the default),
 *     reduces all dimensions.
 * @param keepDims If true, retains reduced dimensions with length
 *     of 1. Defaults to false.
 */
/** @doc {heading: 'Operations', subheading: 'Reduction'} */
function logSumExp_<T extends Tensor>(
    x: Tensor|TensorLike, axis: number|number[] = null, keepDims = false): T {
  const $x = convertToTensor(x, 'x', 'logSumExp');
  const axes = util.parseAxisParam(axis, $x.shape);
  // Use the shifted form log(sum(exp(x - max))) + max for numerical stability:
  // exponentiating raw values can overflow to Infinity.
  const xMax = $x.max(axes, true /* keepDims */);
  const shiftedLogSum = $x.sub(xMax).exp().sum(axes).log();
  const result = xMax.reshape(shiftedLogSum.shape).add(shiftedLogSum);
  if (!keepDims) {
    return result as T;
  }
  // Restore the reduced axes as size-1 dimensions.
  const keptShape = axis_util.expandShapeToKeepDim(result.shape, axes);
  return result.reshape(keptShape) as T;
}
/**
* Computes the sum of elements across dimensions of a `tf.Tensor`.
*
* Reduces the input along the dimensions given in `axes`. Unless `keepDims`
* is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in
* `axes`. If `keepDims` is true, the reduced dimensions are retained with
* length 1. If axes has no entries, all dimensions are reduced, and a
* `tf.Tensor` with a single element is returned.
*
* ```js
* const x = tf.tensor1d([1, 2, 3]);
*
* x.sum().print(); // or tf.sum(x)
* ```
*
* ```js
* const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);
*
* const axis = 1;
* x.sum(axis).print(); // or tf.sum(x, axis)
* ```
*
* @param x The input tensor to compute the sum over. If the dtype is `bool`
* it will be converted to `int32` and the output dtype will be `int32`.
* @param axis The dimension(s) to reduce. By default it reduces
* all dimensions.
* @param keepDims If true, retains reduced dimensions with size 1.
*/
/** @doc {heading: 'Operations', subheading: 'Reduction'} */
function sum_<T extends Tensor>(
x: Tensor|TensorLike, axis: number|number[] = null, keepDims = false): T {
let $x = convertToTensor(x, 'x', 'sum');
// Booleans are summed as int32 counts.
if ($x.dtype === 'bool') {
$x = $x.toInt();
}
const axes = util.parseAxisParam(axis, $x.shape);
// Use a custom gradient to bypass 2 gradient backprops since sum is used
// extremely often.
const customOp = customGrad((x: Tensor) => {
// Backends reduce over the innermost axes, so transpose the reduction
// axes into the innermost positions when they are not already there.
const permutation = axis_util.getAxesPermutation(axes, x.rank);
let reductionAxes = axes;
let permutedX = x;
if (permutation != null) {
permutedX = x.transpose(permutation);
reductionAxes = axis_util.getInnerMostAxes(reductionAxes.length, x.rank);
}
let value = ENGINE.runKernel(
backend => backend.sum(permutedX, reductionAxes), {permutedX});
if (keepDims) {
const newShape = axis_util.expandShapeToKeepDim(value.shape, axes);
value = value.reshape(newShape);
}
// Gradient of sum: broadcast dy back across the reduced axes
// (each input element contributes 1 to the sum).
const gradFunc = (dy: Tensor) => {
const expandedDyShape = x.shape.slice();
axes.forEach(axis => {
expandedDyShape[axis] = 1;
});
const expandedDy = dy.reshape(expandedDyShape);
const derX = expandedDy.mul(ones(x.shape, 'float32'));
return derX;
};
return {value, gradFunc};
});
return customOp($x) as T;
}
/**
 * Computes the product of elements across dimensions of a `tf.Tensor`.
 *
 * Reduces the input along the dimensions given in `axes`. Unless `keepDims`
 * is true, the rank of the `tf.Tensor` is reduced by 1 for each entry in
 * `axes`. If `keepDims` is true, the reduced dimensions are retained with
 * length 1. If `axes` has no entries, all dimensions are reduced, and a
 * `tf.Tensor` with a single element is returned.
 *
 * ```js
 * const x = tf.tensor1d([1, 2, 3]);
 *
 * x.prod().print(); // or tf.prod(x)
 * ```
 *
 * ```js
 * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);
 *
 * const axis = 1;
 * x.prod(axis).print(); // or tf.prod(x, axis)
 * ```
 *
 * @param x The input tensor to compute the product over. If the dtype is
 *     `bool` it will be converted to `int32` and the output dtype will be
 *     `int32`.
 * @param axis The dimension(s) to reduce. By default it reduces
 *     all dimensions.
 * @param keepDims If true, retains reduced dimensions with size 1.
 */
/** @doc {heading: 'Operations', subheading: 'Reduction'} */
function prod_<T extends Tensor>(
    x: Tensor|TensorLike, axis: number|number[] = null, keepDims = false): T {
  let $x = convertToTensor(x, 'x', 'prod');
  // Booleans are multiplied as int32.
  if ($x.dtype === 'bool') {
    $x = $x.toInt();
  }
  const axes = util.parseAxisParam(axis, $x.shape);
  // Backends reduce over the innermost axes; transpose them into place first.
  const permutation = axis_util.getAxesPermutation(axes, $x.rank);
  const permutedX = permutation == null ? $x : $x.transpose(permutation);
  const reductionAxes = permutation == null ?
      axes :
      axis_util.getInnerMostAxes(axes.length, $x.rank);
  let result = ENGINE.runKernel(
      backend => backend.prod(permutedX, reductionAxes), {permutedX});
  if (keepDims) {
    // Restore the reduced axes as size-1 dimensions.
    result = result.reshape(axis_util.expandShapeToKeepDim(result.shape, axes));
  }
  return result as T;
}
/**
 * Computes the mean of elements across dimensions of a `tf.Tensor`.
 *
 * Reduces `x` along the dimensions given in `axis`. Unless `keepDims` is
 * true, each entry in `axis` removes one dimension from the rank of the
 * result. With `keepDims` set, the reduced dimensions are kept with length
 * 1. An empty `axis` reduces every dimension and yields a single-element
 * `tf.Tensor`.
 *
 * ```js
 * const x = tf.tensor1d([1, 2, 3]);
 *
 * x.mean().print(); // or tf.mean(a)
 * ```
 *
 * ```js
 * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);
 *
 * const axis = 1;
 * x.mean(axis).print(); // or tf.mean(x, axis)
 * ```
 *
 * @param x The input tensor.
 * @param axis The dimension(s) to reduce. By default it reduces
 *     all dimensions.
 * @param keepDims If true, retains reduced dimensions with size 1.
 */
/** @doc {heading: 'Operations', subheading: 'Reduction'} */
function mean_<T extends Tensor>(
    x: Tensor|TensorLike, axis: number|number[] = null, keepDims = false): T {
  const $x = convertToTensor(x, 'x', 'mean');
  const axes = util.parseAxisParam(axis, $x.shape);
  const reduceShape = axis_util.computeOutAndReduceShapes($x.shape, axes)[1];
  const reduceSize = util.sizeFromShape(reduceShape);
  // Wrap in a custom gradient: mean is used extremely often, and this
  // collapses two backprop steps (div + sum) into one.
  const customOp = customGrad((x: Tensor) => {
    const divisor = scalar(reduceSize);
    // Match dtypes before dividing (e.g. int32 input vs. float divisor).
    const xReduce = divisor.dtype === x.dtype ? x : x.cast(divisor.dtype);
    const value = xReduce.div(divisor).sum(axis, keepDims);
    const gradFunc = (dy: Tensor) => {
      // Broadcast dy back over the reduced axes, scaled by 1/reduceSize.
      const expandedDyShape = x.shape.slice();
      axes.forEach(ax => {
        expandedDyShape[ax] = 1;
      });
      return dy.reshape(expandedDyShape)
          .mul(ones(x.shape, 'float32'))
          .div(reduceSize);
    };
    return {value, gradFunc};
  });
  return customOp($x) as T;
}
/**
 * Shared gradient helper for the min and max operations: routes `dy` to the
 * input positions that attained the reduced extremum.
 */
function gradForMinAndMax<T extends Tensor>(
    dy: T, y: T, xOrig: Tensor, origAxes: number[], permutedAxes: number[]) {
  // Re-expand the reduced tensors so they broadcast against xOrig.
  if (y.rank < xOrig.rank) {
    y = y.reshape(axis_util.expandShapeToKeepDim(y.shape, origAxes)) as T;
  }
  if (dy.rank < xOrig.rank) {
    dy = dy.reshape(axis_util.expandShapeToKeepDim(dy.shape, origAxes)) as T;
  }
  return {
    $x: () => {
      // Mask selects the extremal entries; undo any forward-pass
      // permutation before returning.
      const mask = xOrig.equal(y).cast(dy.dtype);
      const dx = dy.mul(mask);
      return permutedAxes == null ? dx : dx.transpose(permutedAxes);
    }
  };
}
/**
 * Computes the minimum value from the input.
 *
 * Reduces the input along the dimensions given in `axes`. Unless `keepDims`
 * is true, each entry in `axes` removes one dimension from the rank of the
 * result. With `keepDims` set, the reduced dimensions are kept with length
 * 1. An empty `axes` reduces every dimension and yields a single-element
 * array.
 *
 * ```js
 * const x = tf.tensor1d([1, 2, 3]);
 *
 * x.min().print(); // or tf.min(x)
 * ```
 *
 * ```js
 * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);
 *
 * const axis = 1;
 * x.min(axis).print(); // or tf.min(x, axis)
 * ```
 *
 * @param x The input Tensor.
 * @param axis The dimension(s) to reduce. By default it reduces
 *     all dimensions.
 * @param keepDims If true, retains reduced dimensions with size 1.
 */
/** @doc {heading: 'Operations', subheading: 'Reduction'} */
function min_<T extends Tensor>(
    x: Tensor|TensorLike, axis: number|number[] = null, keepDims = false): T {
  let $x = convertToTensor(x, 'x', 'min');
  const xOrig = $x;
  const origAxes = util.parseAxisParam(axis, $x.shape);
  let axes = origAxes;
  const permutedAxes = axis_util.getAxesPermutation(axes, $x.rank);
  if (permutedAxes != null) {
    // Permute the reduction axes to the inner-most positions for the
    // backend.
    $x = $x.transpose(permutedAxes);
    axes = axis_util.getInnerMostAxes(axes.length, $x.rank);
  }
  // Gradient flows only to the entries that attained the minimum.
  const grad = (dy: T, saved: Tensor[]) =>
      gradForMinAndMax(dy, saved[1], saved[0], origAxes, permutedAxes);
  const res = ENGINE.runKernel((backend, save) => {
    const y = backend.min($x, axes);
    save([xOrig, y]);
    return y as T;
  }, {$x}, grad);
  if (!keepDims) {
    return res as T;
  }
  return res.reshape(axis_util.expandShapeToKeepDim(res.shape, origAxes)) as T;
}
/**
 * Computes the maximum of elements across dimensions of a `tf.Tensor`.
 *
 * Reduces the input along the dimensions given in `axes`. Unless `keepDims`
 * is true, each entry in `axes` removes one dimension from the rank of the
 * result. With `keepDims` set, the reduced dimensions are kept with length
 * 1. An empty `axes` reduces every dimension and yields a single-element
 * `tf.Tensor`.
 *
 * ```js
 * const x = tf.tensor1d([1, 2, 3]);
 *
 * x.max().print(); // or tf.max(x)
 * ```
 *
 * ```js
 * const x = tf.tensor2d([1, 2, 3, 4], [2, 2]);
 *
 * const axis = 1;
 * x.max(axis).print(); // or tf.max(x, axis)
 * ```
 *
 * @param x The input tensor.
 * @param axis The dimension(s) to reduce. By default it reduces
 *     all dimensions.
 * @param keepDims If true, retains reduced dimensions with size 1.
 */
/** @doc {heading: 'Operations', subheading: 'Reduction'} */
function max_<T extends Tensor>(
    x: Tensor|TensorLike, axis: number|number[] = null, keepDims = false): T {
  let $x = convertToTensor(x, 'x', 'max');
  const xOrig = $x;
  const origAxes = util.parseAxisParam(axis, $x.shape);
  let axes = origAxes;
  const permutedAxes = axis_util.getAxesPermutation(axes, $x.rank);
  if (permutedAxes != null) {
    // Permute the reduction axes to the inner-most positions for the
    // backend.
    $x = $x.transpose(permutedAxes);
    axes = axis_util.getInnerMostAxes(axes.length, $x.rank);
  }
  // Gradient flows only to the entries that attained the maximum.
  const grad = (dy: T, saved: Tensor[]) =>
      gradForMinAndMax(dy, saved[1], saved[0], origAxes, permutedAxes);
  const res = ENGINE.runKernel((backend, save) => {
    const y = backend.max($x, axes);
    save([xOrig, y]);
    return y;
  }, {$x}, grad);
  if (!keepDims) {
    return res as T;
  }
  return res.reshape(axis_util.expandShapeToKeepDim(res.shape, origAxes)) as T;
}
/**
 * Returns the indices of the minimum values along an `axis`.
 *
 * The result has the same shape as `input` with the dimension along `axis`
 * removed.
 *
 * ```js
 * const x = tf.tensor1d([1, 2, 3]);
 *
 * x.argMin().print(); // or tf.argMin(x)
 * ```
 *
 * ```js
 * const x = tf.tensor2d([1, 2, 4, 3], [2, 2]);
 *
 * const axis = 1;
 * x.argMin(axis).print(); // or tf.argMin(x, axis)
 * ```
 *
 * @param x The input tensor.
 * @param axis The dimension to reduce. Defaults to 0 (outer-most dimension).
 *
 */
/** @doc {heading: 'Operations', subheading: 'Reduction'} */
function argMin_<T extends Tensor>(x: Tensor|TensorLike, axis = 0): T {
  let $x = convertToTensor(x, 'x', 'argMin');
  if (axis == null) {
    // Treat an explicit null the same as the default axis.
    axis = 0;
  }
  let axes = util.parseAxisParam(axis, $x.shape);
  const permutedAxes = axis_util.getAxesPermutation(axes, $x.rank);
  if (permutedAxes != null) {
    $x = $x.transpose(permutedAxes);
    axes = axis_util.getInnerMostAxes(axes.length, $x.rank);
  }
  // Indices carry no gradient: dx is all zeros with the input's shape.
  const grad = (dy: T, saved: Tensor[]) => ({$x: () => zerosLike(saved[0])});
  return ENGINE.runKernel((backend, save) => {
    const res = backend.argMin($x, axes[0]);
    save([$x]);
    return res;
  }, {$x}, grad) as T;
}
/**
 * Returns the indices of the maximum values along an `axis`.
 *
 * The result has the same shape as `input` with the dimension along `axis`
 * removed.
 *
 * ```js
 * const x = tf.tensor1d([1, 2, 3]);
 *
 * x.argMax().print(); // or tf.argMax(x)
 * ```
 *
 * ```js
 * const x = tf.tensor2d([1, 2, 4, 3], [2, 2]);
 *
 * const axis = 1;
 * x.argMax(axis).print(); // or tf.argMax(x, axis)
 * ```
 *
 * @param x The input tensor.
 * @param axis The dimension to reduce. Defaults to 0 (outer-most dimension).
 */
/** @doc {heading: 'Operations', subheading: 'Reduction'} */
function argMax_<T extends Tensor>(x: Tensor|TensorLike, axis = 0): T {
  let $x = convertToTensor(x, 'x', 'argMax');
  if (axis == null) {
    // Treat an explicit null the same as the default axis.
    axis = 0;
  }
  let axes = util.parseAxisParam(axis, $x.shape);
  const permutedAxes = axis_util.getAxesPermutation(axes, $x.rank);
  if (permutedAxes != null) {
    $x = $x.transpose(permutedAxes);
    axes = axis_util.getInnerMostAxes(axes.length, $x.rank);
  }
  // Indices carry no gradient: dx is all zeros with the input's shape.
  const grad = (dy: T, saved: Tensor[]) => ({$x: () => zerosLike(saved[0])});
  return ENGINE.runKernel((backend, save) => {
    const res = backend.argMax($x, axes[0]);
    save([$x]);
    return res;
  }, {$x}, grad) as T;
}
/**
 * Computes the logical and of elements across dimensions of a `tf.Tensor`.
 *
 * Reduces the input along the dimensions given in `axes`. Unless `keepDims`
 * is true, each entry in `axes` removes one dimension from the rank of the
 * result. With `keepDims` set, the reduced dimensions are kept with length
 * 1. An empty `axes` reduces every dimension and yields a single-element
 * `tf.Tensor`.
 *
 * ```js
 * const x = tf.tensor1d([1, 1, 1], 'bool');
 *
 * x.all().print(); // or tf.all(x)
 * ```
 *
 * ```js
 * const x = tf.tensor2d([1, 1, 0, 0], [2, 2], 'bool');
 *
 * const axis = 1;
 * x.all(axis).print(); // or tf.all(x, axis)
 * ```
 *
 * @param x The input tensor. Must be of dtype bool.
 * @param axis The dimension(s) to reduce. By default it reduces
 *     all dimensions.
 * @param keepDims If true, retains reduced dimensions with size 1.
 */
/** @doc {heading: 'Operations', subheading: 'Reduction'} */
function all_<T extends Tensor>(
    x: Tensor|TensorLike, axis: number|number[] = null, keepDims = false): T {
  let $x = convertToTensor(x, 'x', 'all', 'bool');
  const origAxes = util.parseAxisParam(axis, $x.shape);
  let axes = origAxes;
  const permutedAxes = axis_util.getAxesPermutation(axes, $x.rank);
  if (permutedAxes != null) {
    // Permute the reduction axes to the inner-most positions for the
    // backend.
    $x = $x.transpose(permutedAxes);
    axes = axis_util.getInnerMostAxes(axes.length, $x.rank);
  }
  const res = ENGINE.runKernel(backend => backend.all($x, axes), {$x});
  if (!keepDims) {
    return res as T;
  }
  return res.reshape(axis_util.expandShapeToKeepDim(res.shape, origAxes)) as T;
}
/**
 * Computes the logical or of elements across dimensions of a `tf.Tensor`.
 *
 * Reduces the input along the dimensions given in `axes`. Unless `keepDims`
 * is true, each entry in `axes` removes one dimension from the rank of the
 * result. With `keepDims` set, the reduced dimensions are kept with length
 * 1. An empty `axes` reduces every dimension and yields a single-element
 * `tf.Tensor`.
 *
 * ```js
 * const x = tf.tensor1d([1, 1, 1], 'bool');
 *
 * x.any().print(); // or tf.any(x)
 * ```
 *
 * ```js
 * const x = tf.tensor2d([1, 1, 0, 0], [2, 2], 'bool');
 *
 * const axis = 1;
 * x.any(axis).print(); // or tf.any(x, axis)
 * ```
 *
 * @param x The input tensor. Must be of dtype bool.
 * @param axis The dimension(s) to reduce. By default it reduces
 *     all dimensions.
 * @param keepDims If true, retains reduced dimensions with size 1.
 */
/** @doc {heading: 'Operations', subheading: 'Reduction'} */
function any_<T extends Tensor>(
    x: Tensor|TensorLike, axis: number|number[] = null, keepDims = false): T {
  let $x = convertToTensor(x, 'x', 'any', 'bool');
  const origAxes = util.parseAxisParam(axis, $x.shape);
  let axes = origAxes;
  const permutedAxes = axis_util.getAxesPermutation(axes, $x.rank);
  if (permutedAxes != null) {
    // Permute the reduction axes to the inner-most positions for the
    // backend.
    $x = $x.transpose(permutedAxes);
    axes = axis_util.getInnerMostAxes(axes.length, $x.rank);
  }
  const res = ENGINE.runKernel(backend => backend.any($x, axes), {$x});
  if (!keepDims) {
    return res as T;
  }
  return res.reshape(axis_util.expandShapeToKeepDim(res.shape, origAxes)) as T;
}
/**
 * Calculates the mean and variance of `x`. The mean and variance are
 * calculated by aggregating the contents of `x` across `axes`. If `x` is
 * 1-D and `axes = [0]` this is just the mean and variance of a vector.
 *
 * @param x The input tensor.
 * @param axis The dimension(s) along with to compute mean and
 *     variance. By default it reduces all dimensions.
 * @param keepDims If true, the moments have the same dimensionality as the
 *     input.
 * @return An object with two keys: `mean` and `variance`.
 */
/** @doc {heading: 'Operations', subheading: 'Normalization'} */
function moments_(
    x: Tensor|TensorLike, axis: number|number[] = null,
    keepDims = false): {mean: Tensor, variance: Tensor} {
  x = convertToTensor(x, 'x', 'moments');
  const axes = util.parseAxisParam(axis, x.shape);
  const mean = x.mean(axes, keepDims);
  // Shape used to broadcast the mean back over the reduced axes.
  const broadcastShape = keepDims ?
      mean.shape :
      axis_util.expandShapeToKeepDim(mean.shape, axes);
  const deviation = x.toFloat().sub(mean.reshape(broadcastShape));
  const variance = deviation.square().mean(axes, keepDims);
  return {mean, variance};
}
export const all = op({all_});
// tslint:disable-next-line:variable-name
export const any = op({any_});
export const argMax = op({argMax_});
export const argMin = op({argMin_});
export const logSumExp = op({logSumExp_});
export const max = op({max_});
export const mean = op({mean_});
export const min = op({min_});
export const moments = op({moments_});
export const sum = op({sum_});
export const prod = op({prod_}); | the_stack |
//@ts-check
///<reference path="devkit.d.ts" />
declare namespace DevKit {
	/**
	 * Typed control surface for the Invoice "Field Service Information" form
	 * (generated by DynamicsCrm.DevKit). Member names map 1:1 to the form's
	 * controls, tabs, sections, navigation items, process fields, and grids.
	 */
	namespace FormInvoice_Field_Service_Information {
		/** Controls shown in the form header. */
		interface Header extends DevKit.Controls.IHeader {
			/** Owner Id */
			OwnerId: DevKit.Controls.Lookup;
			/** Shows whether the invoice is active, paid, or canceled. Paid and canceled invoices are read-only and can't be edited unless they are reactivated. */
			StateCode: DevKit.Controls.OptionSet;
			/** Select the invoice's status. */
			StatusCode: DevKit.Controls.OptionSet;
			/** Shows the total amount due, calculated as the sum of the products, discount, freight, and taxes for the invoice. */
			TotalAmount: DevKit.Controls.Money;
		}
		/** Sections hosted on the "details" tab. */
		interface tab_details_tab_Sections {
			details_tab_section_4: DevKit.Controls.Section;
		}
		/** Sections hosted on the "Summary" tab. */
		interface tab_Summary_tab_Sections {
			addresses: DevKit.Controls.Section;
			dates: DevKit.Controls.Section;
			details_tab_section_3: DevKit.Controls.Section;
			DynamicProperties: DevKit.Controls.Section;
			HiddenFields: DevKit.Controls.Section;
			invoice_information: DevKit.Controls.Section;
			InvoicePeriodSection: DevKit.Controls.Section;
			products: DevKit.Controls.Section;
			ServicingSection: DevKit.Controls.Section;
			shipping_information: DevKit.Controls.Section;
			Social_Pane: DevKit.Controls.Section;
			Summary_tab_section_11: DevKit.Controls.Section;
			Summary_tab_section_13: DevKit.Controls.Section;
			totals: DevKit.Controls.Section;
		}
		interface tab_details_tab extends DevKit.Controls.ITab {
			Section: tab_details_tab_Sections;
		}
		interface tab_Summary_tab extends DevKit.Controls.ITab {
			Section: tab_Summary_tab_Sections;
		}
		/** All tabs on the form, keyed by tab name. */
		interface Tabs {
			details_tab: tab_details_tab;
			Summary_tab: tab_Summary_tab;
		}
		/** Controls placed in the form body. */
		interface Body {
			Tab: Tabs;
			/** Shows the complete Bill To address. */
			BillTo_Composite: DevKit.Controls.String;
			/** Date and time when the record was created. */
			CreatedOn: DevKit.Controls.DateTime;
			/** Select the customer account or contact to provide a quick link to additional customer details, such as account information, activities, and opportunities. */
			CustomerId: DevKit.Controls.Lookup;
			/** Enter the date when the products included in the invoice were delivered. */
			DateDelivered: DevKit.Controls.Date;
			/** Type additional information to describe the invoice, such as shipping details or product substitutions. */
			Description: DevKit.Controls.String;
			/** Type the discount amount for the invoice if the customer is eligible for special savings. */
			DiscountAmount: DevKit.Controls.Money;
			/** Type the discount rate that should be applied to the Detail Amount field, for use in calculating the Pre-Freight Amount and Total Amount values for the invoice. */
			DiscountPercentage: DevKit.Controls.Decimal;
			/** Enter the date by which the invoice should be paid by the customer. */
			DueDate: DevKit.Controls.Date;
			/** Type the cost of freight or shipping for the products included in the invoice for use in calculating the total amount due. */
			FreightAmount: DevKit.Controls.Money;
			/** Shows the identifying number or code of the invoice. */
			InvoiceNumber: DevKit.Controls.String;
			/** Select whether prices specified on the invoice are locked from any further updates. */
			IsPriceLocked: DevKit.Controls.Boolean;
			/** Document date of the Invoice for use in calculation of payment due date */
			msdyn_InvoiceDate: DevKit.Controls.Date;
			/** Whether the Invoice is for an Item-based or a service maintenance-based sale */
			msdyn_OrderType: DevKit.Controls.OptionSet;
			/** Type a descriptive name for the invoice. */
			Name: DevKit.Controls.String;
			notescontrol: DevKit.Controls.Note;
			/** Choose the opportunity that the invoice is related to for reporting and analytics. */
			OpportunityId: DevKit.Controls.Lookup;
			/** Select the payment terms to indicate when the customer needs to pay the total amount. */
			PaymentTermsCode: DevKit.Controls.OptionSet;
			/** Choose the price list associated with this record to make sure the products associated with the campaign are offered at the correct prices. */
			PriceLevelId: DevKit.Controls.Lookup;
			/** Choose the order related to the invoice to make sure the order is fulfilled and invoiced correctly. */
			SalesOrderId: DevKit.Controls.Lookup;
			/** Select a shipping method for deliveries sent to this address. */
			ShippingMethodCode: DevKit.Controls.OptionSet;
			/** Shows the complete Ship To address. */
			ShipTo_Composite: DevKit.Controls.String;
			/** Shows the total amount due, calculated as the sum of the products, discount, freight, and taxes for the invoice. */
			TotalAmount: DevKit.Controls.Money;
			/** Shows the total product amount due, minus any discounts. This value is added to freight and tax amounts in the calculation for the total amount due for the invoice. */
			TotalAmountLessFreight: DevKit.Controls.Money;
			/** Shows the sum of all existing and write-in products included on the invoice, based on the specified price list and quantities. */
			TotalLineItemAmount: DevKit.Controls.Money;
			/** Shows the total of the Tax amounts specified on all products included in the invoice, included in the Total Amount due calculation for the invoice. */
			TotalTax: DevKit.Controls.Money;
			/** Choose the local currency for the record to make sure budgets are reported in the correct currency. */
			TransactionCurrencyId: DevKit.Controls.Lookup;
			/** Select whether the products included in the invoice should be shipped to the specified address or held until the customer calls with further pick up or delivery instructions. */
			WillCall: DevKit.Controls.Boolean;
		}
		/** Navigation items available on the form. */
		interface Navigation {
			nav_msdyn_invoice_msdyn_agreementinvoicedate_Invoice: DevKit.Controls.NavigationItem,
			nav_msdyn_invoice_msdyn_orderinvoicingdate_Invoice: DevKit.Controls.NavigationItem,
			nav_msdyn_invoice_msdyn_orderinvoicingsetupdate_Invoice: DevKit.Controls.NavigationItem,
			nav_msdyn_invoice_msdyn_paymentdetail_Invoice: DevKit.Controls.NavigationItem,
			navProcessSessions: DevKit.Controls.NavigationItem,
			navProducts: DevKit.Controls.NavigationItem
		}
		/** Fields surfaced on the "Project Service Invoice Process" business process flow. */
		interface ProcessProject_Service_Invoice_Process {
			/** Select the customer account or contact to provide a quick link to additional customer details, such as account information, activities, and opportunities. */
			CustomerId: DevKit.Controls.Lookup;
			/** Choose the order related to the invoice to make sure the order is fulfilled and invoiced correctly. */
			SalesOrderId: DevKit.Controls.Lookup;
			/** Shows the total amount due, calculated as the sum of the products, discount, freight, and taxes for the invoice. */
			TotalAmount: DevKit.Controls.Money;
		}
		interface Process extends DevKit.Controls.IProcess {
			Project_Service_Invoice_Process: ProcessProject_Service_Invoice_Process;
		}
		/** Sub-grids placed on the form. */
		interface Grid {
			GridServiceLines: DevKit.Controls.Grid;
			invoicedetailsGrid: DevKit.Controls.Grid;
			GridInvoicingPeriod: DevKit.Controls.Grid;
		}
	}
	/** Strongly-typed wrapper for the Invoice "Field Service Information" form. */
	class FormInvoice_Field_Service_Information extends DevKit.IForm {
		/**
		 * DynamicsCrm.DevKit form Invoice_Field_Service_Information
		 * @param executionContext the execution context passed by the form event
		 * @param defaultWebResourceName default web resource name. E.g.: "devkit_/resources/Resource"
		 */
		constructor(executionContext: any, defaultWebResourceName?: string);
		/** Utility functions/methods/objects for Dynamics 365 form */
		Utility: DevKit.Utility;
		/** The Body section of form Invoice_Field_Service_Information */
		Body: DevKit.FormInvoice_Field_Service_Information.Body;
		/** The Header section of form Invoice_Field_Service_Information */
		Header: DevKit.FormInvoice_Field_Service_Information.Header;
		/** The Navigation of form Invoice_Field_Service_Information */
		Navigation: DevKit.FormInvoice_Field_Service_Information.Navigation;
		/** The Process of form Invoice_Field_Service_Information */
		Process: DevKit.FormInvoice_Field_Service_Information.Process;
		/** The Grid of form Invoice_Field_Service_Information */
		Grid: DevKit.FormInvoice_Field_Service_Information.Grid;
	}
	/**
	 * Typed control surface for the Invoice "Information" form (generated by
	 * DynamicsCrm.DevKit). Member names map 1:1 to the form's controls, tabs,
	 * sections, and process fields.
	 */
	namespace FormInvoice_Information {
		/** Sections hosted on the "addresses" tab. */
		interface tab_addresses_Sections {
			bill_to_address: DevKit.Controls.Section;
			ship_to_address: DevKit.Controls.Section;
		}
		/** Sections hosted on the "administration" tab. */
		interface tab_administration_Sections {
			internal_information: DevKit.Controls.Section;
		}
		/** Sections hosted on the "general" tab. */
		interface tab_general_Sections {
			invoice_information: DevKit.Controls.Section;
			totals: DevKit.Controls.Section;
		}
		/** Sections hosted on the "notes" tab. */
		interface tab_notes_Sections {
			notes: DevKit.Controls.Section;
		}
		/** Sections hosted on the "shipping" tab. */
		interface tab_shipping_Sections {
			dates: DevKit.Controls.Section;
			description: DevKit.Controls.Section;
			shipping_information: DevKit.Controls.Section;
		}
		interface tab_addresses extends DevKit.Controls.ITab {
			Section: tab_addresses_Sections;
		}
		interface tab_administration extends DevKit.Controls.ITab {
			Section: tab_administration_Sections;
		}
		interface tab_general extends DevKit.Controls.ITab {
			Section: tab_general_Sections;
		}
		interface tab_notes extends DevKit.Controls.ITab {
			Section: tab_notes_Sections;
		}
		interface tab_shipping extends DevKit.Controls.ITab {
			Section: tab_shipping_Sections;
		}
		/** All tabs on the form, keyed by tab name. */
		interface Tabs {
			addresses: tab_addresses;
			administration: tab_administration;
			general: tab_general;
			notes: tab_notes;
			shipping: tab_shipping;
		}
		/** Controls placed in the form body. */
		interface Body {
			Tab: Tabs;
			/** Type the city for the customer's billing address. */
			BillTo_City: DevKit.Controls.String;
			/** Type the country or region for the customer's billing address. */
			BillTo_Country: DevKit.Controls.String;
			/** Type the fax number for the customer's billing address. */
			BillTo_Fax: DevKit.Controls.String;
			/** Type the first line of the customer's billing address. */
			BillTo_Line1: DevKit.Controls.String;
			/** Type the second line of the customer's billing address. */
			BillTo_Line2: DevKit.Controls.String;
			/** Type the third line of the billing address. */
			BillTo_Line3: DevKit.Controls.String;
			/** Type a name for the customer's billing address, such as "Headquarters" or "Field office", to identify the address. */
			BillTo_Name: DevKit.Controls.String;
			/** Type the ZIP Code or postal code for the billing address. */
			BillTo_PostalCode: DevKit.Controls.String;
			/** Type the state or province for the billing address. */
			BillTo_StateOrProvince: DevKit.Controls.String;
			/** Type the phone number for the customer's billing address. */
			BillTo_Telephone: DevKit.Controls.String;
			/** Select the customer account or contact to provide a quick link to additional customer details, such as account information, activities, and opportunities. */
			CustomerId: DevKit.Controls.Lookup;
			/** Enter the date when the products included in the invoice were delivered. */
			DateDelivered: DevKit.Controls.Date;
			/** Type additional information to describe the invoice, such as shipping details or product substitutions. */
			Description: DevKit.Controls.String;
			/** Type the discount amount for the invoice if the customer is eligible for special savings. */
			DiscountAmount: DevKit.Controls.Money;
			/** Type the discount rate that should be applied to the Detail Amount field, for use in calculating the Pre-Freight Amount and Total Amount values for the invoice. */
			DiscountPercentage: DevKit.Controls.Decimal;
			/** Enter the date by which the invoice should be paid by the customer. */
			DueDate: DevKit.Controls.Date;
			/** Type the cost of freight or shipping for the products included in the invoice for use in calculating the total amount due. */
			FreightAmount: DevKit.Controls.Money;
			/** Shows the identifying number or code of the invoice. */
			InvoiceNumber: DevKit.Controls.String;
			/** Select whether prices specified on the invoice are locked from any further updates. */
			IsPriceLocked: DevKit.Controls.Boolean;
			/** Type a descriptive name for the invoice. */
			Name: DevKit.Controls.String;
			notescontrol: DevKit.Controls.Note;
			/** Choose the opportunity that the invoice is related to for reporting and analytics. */
			OpportunityId: DevKit.Controls.Lookup;
			/** Owner Id */
			OwnerId: DevKit.Controls.Lookup;
			/** Select the payment terms to indicate when the customer needs to pay the total amount. */
			PaymentTermsCode: DevKit.Controls.OptionSet;
			/** Choose the price list associated with this record to make sure the products associated with the campaign are offered at the correct prices. */
			PriceLevelId: DevKit.Controls.Lookup;
			/** Choose the order related to the invoice to make sure the order is fulfilled and invoiced correctly. */
			SalesOrderId: DevKit.Controls.Lookup;
			/** Select a shipping method for deliveries sent to this address. */
			ShippingMethodCode: DevKit.Controls.OptionSet;
			/** Type the city for the customer's shipping address. */
			ShipTo_City: DevKit.Controls.String;
			/** Type the country or region for the customer's shipping address. */
			ShipTo_Country: DevKit.Controls.String;
			/** Type the fax number for the customer's shipping address. */
			ShipTo_Fax: DevKit.Controls.String;
			/** Type the first line of the customer's shipping address. */
			ShipTo_Line1: DevKit.Controls.String;
			/** Type the second line of the customer's shipping address. */
			ShipTo_Line2: DevKit.Controls.String;
			/** Type the third line of the shipping address. */
			ShipTo_Line3: DevKit.Controls.String;
			/** Type a name for the customer's shipping address, such as "Headquarters" or "Field office", to identify the address. */
			ShipTo_Name: DevKit.Controls.String;
			/** Type the ZIP Code or postal code for the shipping address. */
			ShipTo_PostalCode: DevKit.Controls.String;
			/** Type the state or province for the shipping address. */
			ShipTo_StateOrProvince: DevKit.Controls.String;
			/** Type the phone number for the customer's shipping address. */
			ShipTo_Telephone: DevKit.Controls.String;
			/** Select the invoice's status. */
			StatusCode: DevKit.Controls.OptionSet;
			/** Shows the total amount due, calculated as the sum of the products, discount, freight, and taxes for the invoice. */
			TotalAmount: DevKit.Controls.Money;
			/** Shows the total product amount due, minus any discounts. This value is added to freight and tax amounts in the calculation for the total amount due for the invoice. */
			TotalAmountLessFreight: DevKit.Controls.Money;
			/** Shows the sum of all existing and write-in products included on the invoice, based on the specified price list and quantities. */
			TotalLineItemAmount: DevKit.Controls.Money;
			/** Shows the total of the Tax amounts specified on all products included in the invoice, included in the Total Amount due calculation for the invoice. */
			TotalTax: DevKit.Controls.Money;
			/** Choose the local currency for the record to make sure budgets are reported in the correct currency. */
			TransactionCurrencyId: DevKit.Controls.Lookup;
			/** Select whether the products included in the invoice should be shipped to the specified address or held until the customer calls with further pick up or delivery instructions. */
			WillCall: DevKit.Controls.Boolean;
		}
		/** Fields surfaced on the "Project Service Invoice Process" business process flow. */
		interface ProcessProject_Service_Invoice_Process {
			/** Select the customer account or contact to provide a quick link to additional customer details, such as account information, activities, and opportunities. */
			CustomerId: DevKit.Controls.Lookup;
			/** Choose the order related to the invoice to make sure the order is fulfilled and invoiced correctly. */
			SalesOrderId: DevKit.Controls.Lookup;
			/** Shows the total amount due, calculated as the sum of the products, discount, freight, and taxes for the invoice. */
			TotalAmount: DevKit.Controls.Money;
		}
		interface Process extends DevKit.Controls.IProcess {
			Project_Service_Invoice_Process: ProcessProject_Service_Invoice_Process;
		}
	}
	/** Strongly-typed wrapper for the Invoice "Information" form. */
	class FormInvoice_Information extends DevKit.IForm {
		/**
		 * DynamicsCrm.DevKit form Invoice_Information
		 * @param executionContext the execution context passed by the form event
		 * @param defaultWebResourceName default web resource name. E.g.: "devkit_/resources/Resource"
		 */
		constructor(executionContext: any, defaultWebResourceName?: string);
		/** Utility functions/methods/objects for Dynamics 365 form */
		Utility: DevKit.Utility;
		/** The Body section of form Invoice_Information */
		Body: DevKit.FormInvoice_Information.Body;
		/** The Process of form Invoice_Information */
		Process: DevKit.FormInvoice_Information.Process;
	}
namespace FormInvoice {
// Control typings for the main "Invoice" form (generated by DynamicsCrm.DevKit).
/** Controls shown in the form header. */
interface Header extends DevKit.Controls.IHeader {
/** Owner Id */
OwnerId: DevKit.Controls.Lookup;
/** Shows whether the invoice is active, paid, or canceled. Paid and canceled invoices are read-only and can't be edited unless they are reactivated. */
StateCode: DevKit.Controls.OptionSet;
/** Select the invoice's status. */
StatusCode: DevKit.Controls.OptionSet;
/** Shows the total amount due, calculated as the sum of the products, discount, freight, and taxes for the invoice. */
TotalAmount: DevKit.Controls.Money;
}
/** Sections of the "details" tab. */
interface tab_details_tab_Sections {
Social_Pane: DevKit.Controls.Section;
tab_2_section_2: DevKit.Controls.Section;
}
/** Sections of the "Summary" tab. */
interface tab_Summary_tab_Sections {
addresses: DevKit.Controls.Section;
dates: DevKit.Controls.Section;
description_section: DevKit.Controls.Section;
DynamicProperties: DevKit.Controls.Section;
invoice_information: DevKit.Controls.Section;
products: DevKit.Controls.Section;
sales_information: DevKit.Controls.Section;
shipping_information: DevKit.Controls.Section;
suggestionsection: DevKit.Controls.Section;
totals: DevKit.Controls.Section;
}
interface tab_details_tab extends DevKit.Controls.ITab {
Section: tab_details_tab_Sections;
}
interface tab_Summary_tab extends DevKit.Controls.ITab {
Section: tab_Summary_tab_Sections;
}
/** All tabs available on the form. */
interface Tabs {
details_tab: tab_details_tab;
Summary_tab: tab_Summary_tab;
}
/** Controls in the form body. */
interface Body {
Tab: Tabs;
/** Shows the complete Bill To address. */
BillTo_Composite: DevKit.Controls.String;
/** Select the customer account or contact to provide a quick link to additional customer details, such as account information, activities, and opportunities. */
CustomerId: DevKit.Controls.Lookup;
/** Enter the date when the products included in the invoice were delivered. */
DateDelivered: DevKit.Controls.Date;
/** Type additional information to describe the invoice, such as shipping details or product substitutions. */
Description: DevKit.Controls.String;
/** Type the discount amount for the invoice if the customer is eligible for special savings. */
DiscountAmount: DevKit.Controls.Money;
/** Type the discount rate that should be applied to the Detail Amount field, for use in calculating the Pre-Freight Amount and Total Amount values for the invoice. */
DiscountPercentage: DevKit.Controls.Decimal;
/** Enter the date by which the invoice should be paid by the customer. */
DueDate: DevKit.Controls.Date;
/** Type the cost of freight or shipping for the products included in the invoice for use in calculating the total amount due. */
FreightAmount: DevKit.Controls.Money;
/** Shows the identifying number or code of the invoice. */
InvoiceNumber: DevKit.Controls.String;
/** Select whether prices specified on the invoice are locked from any further updates. */
IsPriceLocked: DevKit.Controls.Boolean;
/** Whether the Invoice is for an Item-based or a service maintainence-based sale */
msdyn_OrderType: DevKit.Controls.OptionSet;
/** Type a descriptive name for the invoice. */
Name: DevKit.Controls.String;
/** Notes (timeline) control. */
notescontrol: DevKit.Controls.Note;
/** Choose the opportunity that the invoice is related to for reporting and analytics. */
OpportunityId: DevKit.Controls.Lookup;
/** Select the payment terms to indicate when the customer needs to pay the total amount. */
PaymentTermsCode: DevKit.Controls.OptionSet;
/** Choose the price list associated with this record to make sure the products associated with the campaign are offered at the correct prices. */
PriceLevelId: DevKit.Controls.Lookup;
/** Choose the order related to the invoice to make sure the order is fulfilled and invoiced correctly. */
SalesOrderId: DevKit.Controls.Lookup;
/** Select a shipping method for deliveries sent to this address. */
ShippingMethodCode: DevKit.Controls.OptionSet;
/** Shows the complete Ship To address. */
ShipTo_Composite: DevKit.Controls.String;
/** Shows the total amount due, calculated as the sum of the products, discount, freight, and taxes for the invoice. */
TotalAmount: DevKit.Controls.Money;
/** Shows the total product amount due, minus any discounts. This value is added to freight and tax amounts in the calculation for the total amount due for the invoice. */
TotalAmountLessFreight: DevKit.Controls.Money;
/** Shows the sum of all existing and write-in products included on the invoice, based on the specified price list and quantities. */
TotalLineItemAmount: DevKit.Controls.Money;
/** Shows the total of the Tax amounts specified on all products included in the invoice, included in the Total Amount due calculation for the invoice. */
TotalTax: DevKit.Controls.Money;
/** Choose the local currency for the record to make sure budgets are reported in the correct currency. */
TransactionCurrencyId: DevKit.Controls.Lookup;
/** Select whether the products included in the invoice should be shipped to the specified address or held until the customer calls with further pick up or delivery instructions. */
WillCall: DevKit.Controls.Boolean;
}
/** Navigation items of the form. */
interface Navigation {
navProducts: DevKit.Controls.NavigationItem
}
/** Fields shown on the "Project Service Invoice Process" business process flow. */
interface ProcessProject_Service_Invoice_Process {
/** Select the customer account or contact to provide a quick link to additional customer details, such as account information, activities, and opportunities. */
CustomerId: DevKit.Controls.Lookup;
/** Choose the order related to the invoice to make sure the order is fulfilled and invoiced correctly. */
SalesOrderId: DevKit.Controls.Lookup;
/** Shows the total amount due, calculated as the sum of the products, discount, freight, and taxes for the invoice. */
TotalAmount: DevKit.Controls.Money;
}
interface Process extends DevKit.Controls.IProcess {
Project_Service_Invoice_Process: ProcessProject_Service_Invoice_Process;
}
/** Sub-grids on the form. */
interface Grid {
invoicedetailsGrid: DevKit.Controls.Grid;
}
}
class FormInvoice extends DevKit.IForm {
// Generated wrapper for the "Invoice" main form; member typings live in the DevKit.FormInvoice namespace.
/**
* DynamicsCrm.DevKit form Invoice
* @param executionContext the execution context
* @param defaultWebResourceName default resource name. E.g.: "devkit_/resources/Resource"
*/
constructor(executionContext: any, defaultWebResourceName?: string);
/** Utility functions/methods/objects for Dynamics 365 form */
Utility: DevKit.Utility;
/** The Body section of form Invoice */
Body: DevKit.FormInvoice.Body;
/** The Header section of form Invoice */
Header: DevKit.FormInvoice.Header;
/** The Navigation of form Invoice */
Navigation: DevKit.FormInvoice.Navigation;
/** The Process of form Invoice */
Process: DevKit.FormInvoice.Process;
/** The Grid of form Invoice */
Grid: DevKit.FormInvoice.Grid;
}
namespace FormProject_Invoice {
// Control typings for the "Project Invoice" main form (generated by DynamicsCrm.DevKit).
/** Controls shown in the form header. */
interface Header extends DevKit.Controls.IHeader {
/** Project specific status */
msdyn_projectinvoicestatus: DevKit.Controls.OptionSet;
/** Owner Id */
OwnerId: DevKit.Controls.Lookup;
/** Shows whether the invoice is active, paid, or canceled. Paid and canceled invoices are read-only and can't be edited unless they are reactivated. */
StateCode: DevKit.Controls.OptionSet;
/** Shows the total amount due, calculated as the sum of the products, discount, freight, and taxes for the invoice. */
TotalAmount: DevKit.Controls.Money;
}
/** Sections of the "details" tab. */
interface tab_details_tab_Sections {
Social_Pane: DevKit.Controls.Section;
tab_2_section_2: DevKit.Controls.Section;
}
/** Sections of the hidden/locked-fields tab. */
interface tab_HiddenLockedFields_Sections {
tab_5_section_3: DevKit.Controls.Section;
}
/** Sections of the project "Summary" tab. */
interface tab_Summary_tab_project_Sections {
products: DevKit.Controls.Section;
Summary_tab_project_section_3: DevKit.Controls.Section;
Summary_tab_project_section_5: DevKit.Controls.Section;
tab_4_section_1: DevKit.Controls.Section;
tab_5_section_1: DevKit.Controls.Section;
tab_5_section_2: DevKit.Controls.Section;
totals: DevKit.Controls.Section;
}
interface tab_details_tab extends DevKit.Controls.ITab {
Section: tab_details_tab_Sections;
}
interface tab_HiddenLockedFields extends DevKit.Controls.ITab {
Section: tab_HiddenLockedFields_Sections;
}
interface tab_Summary_tab_project extends DevKit.Controls.ITab {
Section: tab_Summary_tab_project_Sections;
}
/** All tabs available on the form. */
interface Tabs {
details_tab: tab_details_tab;
HiddenLockedFields: tab_HiddenLockedFields;
Summary_tab_project: tab_Summary_tab_project;
}
/** Controls in the form body. */
interface Body {
Tab: Tabs;
/** Shows the complete Bill To address. */
BillTo_Composite: DevKit.Controls.String;
/** Type a name for the customer's billing address, such as "Headquarters" or "Field office", to identify the address. */
BillTo_Name: DevKit.Controls.String;
/** Select the customer account or contact to provide a quick link to additional customer details, such as account information, activities, and opportunities. */
CustomerId: DevKit.Controls.Lookup;
/** Type additional information to describe the invoice, such as shipping details or product substitutions. */
Description: DevKit.Controls.String;
/** Type the discount amount for the invoice if the customer is eligible for special savings. */
DiscountAmount: DevKit.Controls.Money;
/** Type the discount rate that should be applied to the Detail Amount field, for use in calculating the Pre-Freight Amount and Total Amount values for the invoice. */
DiscountPercentage: DevKit.Controls.Decimal;
/** Enter the date by which the invoice should be paid by the customer. */
DueDate: DevKit.Controls.Date;
/** Type the cost of freight or shipping for the products included in the invoice for use in calculating the total amount due. */
FreightAmount: DevKit.Controls.Money;
/** Shows the identifying number or code of the invoice. */
InvoiceNumber: DevKit.Controls.String;
/** Type the primary contact name at the customer's billing address. */
msdyn_billtocontactname: DevKit.Controls.String;
/** Whether the Invoice is for an Item-based or a service maintainence-based sale */
msdyn_OrderType: DevKit.Controls.OptionSet;
/** Type a descriptive name for the invoice. */
Name: DevKit.Controls.String;
/** Notes (timeline) control. */
notescontrol: DevKit.Controls.Note;
/** Choose the opportunity that the invoice is related to for reporting and analytics. */
OpportunityId: DevKit.Controls.Lookup;
/** Select the payment terms to indicate when the customer needs to pay the total amount. */
PaymentTermsCode: DevKit.Controls.OptionSet;
/** Choose the price list associated with this record to make sure the products associated with the campaign are offered at the correct prices. */
PriceLevelId: DevKit.Controls.Lookup;
/** Choose the order related to the invoice to make sure the order is fulfilled and invoiced correctly. */
SalesOrderId: DevKit.Controls.Lookup;
/** Shows the total amount due, calculated as the sum of the products, discount, freight, and taxes for the invoice. */
TotalAmount: DevKit.Controls.Money;
/** Shows the total product amount due, minus any discounts. This value is added to freight and tax amounts in the calculation for the total amount due for the invoice. */
TotalAmountLessFreight: DevKit.Controls.Money;
/** Shows the sum of all existing and write-in products included on the invoice, based on the specified price list and quantities. */
TotalLineItemAmount: DevKit.Controls.Money;
/** Shows the total of the Tax amounts specified on all products included in the invoice, included in the Total Amount due calculation for the invoice. */
TotalTax: DevKit.Controls.Money;
/** Choose the local currency for the record to make sure budgets are reported in the correct currency. */
TransactionCurrencyId: DevKit.Controls.Lookup;
/** Select whether the products included in the invoice should be shipped to the specified address or held until the customer calls with further pick up or delivery instructions. */
WillCall: DevKit.Controls.Boolean;
}
/** Navigation items of the form. */
interface Navigation {
navProducts: DevKit.Controls.NavigationItem
}
/** Fields shown on the "Project Service Invoice Process" business process flow. */
interface ProcessProject_Service_Invoice_Process {
/** Select the customer account or contact to provide a quick link to additional customer details, such as account information, activities, and opportunities. */
CustomerId: DevKit.Controls.Lookup;
/** Choose the order related to the invoice to make sure the order is fulfilled and invoiced correctly. */
SalesOrderId: DevKit.Controls.Lookup;
/** Shows the total amount due, calculated as the sum of the products, discount, freight, and taxes for the invoice. */
TotalAmount: DevKit.Controls.Money;
}
interface Process extends DevKit.Controls.IProcess {
Project_Service_Invoice_Process: ProcessProject_Service_Invoice_Process;
}
/** Sub-grids on the form. */
interface Grid {
ProjectInvoiceLines: DevKit.Controls.Grid;
invoicedetailsGrid: DevKit.Controls.Grid;
}
}
class FormProject_Invoice extends DevKit.IForm {
// Generated wrapper for the "Project Invoice" main form; member typings live in the DevKit.FormProject_Invoice namespace.
/**
* DynamicsCrm.DevKit form Project_Invoice
* @param executionContext the execution context
* @param defaultWebResourceName default resource name. E.g.: "devkit_/resources/Resource"
*/
constructor(executionContext: any, defaultWebResourceName?: string);
/** Utility functions/methods/objects for Dynamics 365 form */
Utility: DevKit.Utility;
/** The Body section of form Project_Invoice */
Body: DevKit.FormProject_Invoice.Body;
/** The Header section of form Project_Invoice */
Header: DevKit.FormProject_Invoice.Header;
/** The Navigation of form Project_Invoice */
Navigation: DevKit.FormProject_Invoice.Navigation;
/** The Process of form Project_Invoice */
Process: DevKit.FormProject_Invoice.Process;
/** The Grid of form Project_Invoice */
Grid: DevKit.FormProject_Invoice.Grid;
}
class InvoiceApi {
// Generated WebApi proxy for the Invoice entity; each property wraps one attribute
// in a typed DevKit.WebApi value object (Readonly variants cannot be written back).
/**
* DynamicsCrm.DevKit InvoiceApi
* @param entity The entity object
*/
constructor(entity?: any);
/**
* Get the value of alias
* @param alias the alias value
* @param isMultiOptionSet true if the alias is multi OptionSet
*/
getAliasedValue(alias: string, isMultiOptionSet?: boolean): any;
/**
* Get the formatted value of alias
* @param alias the alias value
* @param isMultiOptionSet true if the alias is multi OptionSet
*/
getAliasedFormattedValue(alias: string, isMultiOptionSet?: boolean): string;
/** The entity object */
Entity: any;
/** The entity name */
EntityName: string;
/** The entity collection name */
EntityCollectionName: string;
/** The @odata.etag is then used to build a cache of the response that is dependant on the fields that are retrieved */
"@odata.etag": string;
/** Unique identifier of the account with which the invoice is associated. */
AccountId: DevKit.WebApi.LookupValueReadonly;
/** Type the city for the customer's billing address. */
BillTo_City: DevKit.WebApi.StringValue;
/** Shows the complete Bill To address. */
BillTo_Composite: DevKit.WebApi.StringValueReadonly;
/** Type the country or region for the customer's billing address. */
BillTo_Country: DevKit.WebApi.StringValue;
/** Type the fax number for the customer's billing address. */
BillTo_Fax: DevKit.WebApi.StringValue;
/** Type the first line of the customer's billing address. */
BillTo_Line1: DevKit.WebApi.StringValue;
/** Type the second line of the customer's billing address. */
BillTo_Line2: DevKit.WebApi.StringValue;
/** Type the third line of the billing address. */
BillTo_Line3: DevKit.WebApi.StringValue;
/** Type a name for the customer's billing address, such as "Headquarters" or "Field office", to identify the address. */
BillTo_Name: DevKit.WebApi.StringValue;
/** Type the ZIP Code or postal code for the billing address. */
BillTo_PostalCode: DevKit.WebApi.StringValue;
/** Type the state or province for the billing address. */
BillTo_StateOrProvince: DevKit.WebApi.StringValue;
/** Type the phone number for the customer's billing address. */
BillTo_Telephone: DevKit.WebApi.StringValue;
/** Unique identifier of the contact associated with the invoice. */
ContactId: DevKit.WebApi.LookupValueReadonly;
/** Shows who created the record. */
CreatedBy: DevKit.WebApi.LookupValueReadonly;
/** Date and time when the record was created. */
CreatedOn_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValueReadonly;
/** Shows who created the record on behalf of another user. */
CreatedOnBehalfBy: DevKit.WebApi.LookupValueReadonly;
/** Customer reference when the customer is an account. */
customerid_account: DevKit.WebApi.LookupValue;
/** Customer reference when the customer is a contact. */
customerid_contact: DevKit.WebApi.LookupValue;
/** Enter the date when the products included in the invoice were delivered. */
DateDelivered_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue;
/** Type additional information to describe the invoice, such as shipping details or product substitutions. */
Description: DevKit.WebApi.StringValue;
/** Type the discount amount for the invoice if the customer is eligible for special savings. */
DiscountAmount: DevKit.WebApi.MoneyValue;
/** Value of the Invoice Discount Amount in base currency. */
DiscountAmount_Base: DevKit.WebApi.MoneyValueReadonly;
/** Type the discount rate that should be applied to the Detail Amount field, for use in calculating the Pre-Freight Amount and Total Amount values for the invoice. */
DiscountPercentage: DevKit.WebApi.DecimalValue;
/** Enter the date by which the invoice should be paid by the customer. */
DueDate_DateOnly: DevKit.WebApi.DateOnlyValue;
/** The primary email address for the entity. */
EmailAddress: DevKit.WebApi.StringValue;
/** The default image for the entity. */
EntityImage: DevKit.WebApi.StringValue;
/** Timestamp of the default entity image (read-only). */
EntityImage_Timestamp: DevKit.WebApi.BigIntValueReadonly;
/** URL of the default entity image (read-only). */
EntityImage_URL: DevKit.WebApi.StringValueReadonly;
/** Unique identifier of the default entity image (read-only). */
EntityImageId: DevKit.WebApi.GuidValueReadonly;
/** Shows the conversion rate of the record's currency. The exchange rate is used to convert all money fields in the record from the local currency to the system's default currency. */
ExchangeRate: DevKit.WebApi.DecimalValueReadonly;
/** Type the cost of freight or shipping for the products included in the invoice for use in calculating the total amount due. */
FreightAmount: DevKit.WebApi.MoneyValue;
/** Value of the Freight Amount in base currency. */
FreightAmount_Base: DevKit.WebApi.MoneyValueReadonly;
/** Sequence number of the import that created this record. */
ImportSequenceNumber: DevKit.WebApi.IntegerValue;
/** Unique identifier of the invoice. */
InvoiceId: DevKit.WebApi.GuidValue;
/** Shows the identifying number or code of the invoice. */
InvoiceNumber: DevKit.WebApi.StringValue;
/** Select whether prices specified on the invoice are locked from any further updates. */
IsPriceLocked: DevKit.WebApi.BooleanValue;
/** Enter the date and time when the invoice was last submitted to an accounting or ERP system for processing. */
LastBackofficeSubmit_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue;
/** Contains the date time stamp of the last on hold time. */
LastOnHoldTime_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValue;
/** Shows who last updated the record. */
ModifiedBy: DevKit.WebApi.LookupValueReadonly;
/** Date and time when the record was modified. */
ModifiedOn_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValueReadonly;
/** Shows who last updated the record on behalf of another user. */
ModifiedOnBehalfBy: DevKit.WebApi.LookupValueReadonly;
/** Enter the amount due on this invoice. */
msdyn_AmountDue: DevKit.WebApi.MoneyValue;
/** Value of the Amount Due in base currency. */
msdyn_amountdue_Base: DevKit.WebApi.MoneyValueReadonly;
/** Type the primary contact name at the customer's billing address. */
msdyn_billtocontactname: DevKit.WebApi.StringValue;
/** Indicates if this invoice contains corrections to previous invoices. */
msdyn_HasCorrections: DevKit.WebApi.BooleanValue;
/** Document date of the Invoice for use in calculation of payment due date */
msdyn_InvoiceDate_TimezoneDateOnly: DevKit.WebApi.TimezoneDateOnlyValue;
/** Whether the Invoice is for an Item-based or a service maintainence-based sale */
msdyn_OrderType: DevKit.WebApi.OptionSetValue;
/** Project specific status */
msdyn_projectinvoicestatus: DevKit.WebApi.OptionSetValue;
/** Type a descriptive name for the invoice. */
Name: DevKit.WebApi.StringValue;
/** Shows the duration in minutes for which the invoice was on hold. */
OnHoldTime: DevKit.WebApi.IntegerValueReadonly;
/** Choose the opportunity that the invoice is related to for reporting and analytics. */
OpportunityId: DevKit.WebApi.LookupValue;
/** Date and time that the record was migrated. */
OverriddenCreatedOn_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue;
/** Enter the user who is assigned to manage the record. This field is updated every time the record is assigned to a different user */
OwnerId_systemuser: DevKit.WebApi.LookupValue;
/** Enter the team who is assigned to manage the record. This field is updated every time the record is assigned to a different team */
OwnerId_team: DevKit.WebApi.LookupValue;
/** Unique identifier for the business unit that owns the record */
OwningBusinessUnit: DevKit.WebApi.LookupValueReadonly;
/** Unique identifier for the team that owns the record. */
OwningTeam: DevKit.WebApi.LookupValueReadonly;
/** Unique identifier for the user that owns the record. */
OwningUser: DevKit.WebApi.LookupValueReadonly;
/** Select the payment terms to indicate when the customer needs to pay the total amount. */
PaymentTermsCode: DevKit.WebApi.OptionSetValue;
/** Choose the price list associated with this record to make sure the products associated with the campaign are offered at the correct prices. */
PriceLevelId: DevKit.WebApi.LookupValue;
/** Type of pricing error for the invoice. */
PricingErrorCode: DevKit.WebApi.OptionSetValue;
/** Select the priority so that preferred customers or critical issues are handled quickly. */
PriorityCode: DevKit.WebApi.OptionSetValue;
/** Contains the id of the process associated with the entity. */
ProcessId: DevKit.WebApi.GuidValue;
/** Choose the order related to the invoice to make sure the order is fulfilled and invoiced correctly. */
SalesOrderId: DevKit.WebApi.LookupValue;
/** Select a shipping method for deliveries sent to this address. */
ShippingMethodCode: DevKit.WebApi.OptionSetValue;
/** Type the city for the customer's shipping address. */
ShipTo_City: DevKit.WebApi.StringValue;
/** Shows the complete Ship To address. */
ShipTo_Composite: DevKit.WebApi.StringValueReadonly;
/** Type the country or region for the customer's shipping address. */
ShipTo_Country: DevKit.WebApi.StringValue;
/** Type the fax number for the customer's shipping address. */
ShipTo_Fax: DevKit.WebApi.StringValue;
/** Select the freight terms to make sure shipping orders are processed correctly. */
ShipTo_FreightTermsCode: DevKit.WebApi.OptionSetValue;
/** Type the first line of the customer's shipping address. */
ShipTo_Line1: DevKit.WebApi.StringValue;
/** Type the second line of the customer's shipping address. */
ShipTo_Line2: DevKit.WebApi.StringValue;
/** Type the third line of the shipping address. */
ShipTo_Line3: DevKit.WebApi.StringValue;
/** Type a name for the customer's shipping address, such as "Headquarters" or "Field office", to identify the address. */
ShipTo_Name: DevKit.WebApi.StringValue;
/** Type the ZIP Code or postal code for the shipping address. */
ShipTo_PostalCode: DevKit.WebApi.StringValue;
/** Type the state or province for the shipping address. */
ShipTo_StateOrProvince: DevKit.WebApi.StringValue;
/** Type the phone number for the customer's shipping address. */
ShipTo_Telephone: DevKit.WebApi.StringValue;
/** Skip Price Calculation (For Internal Use) */
SkipPriceCalculation: DevKit.WebApi.OptionSetValue;
/** Choose the service level agreement (SLA) that you want to apply to the invoice record. */
SLAId: DevKit.WebApi.LookupValue;
/** Last SLA that was applied to this invoice. This field is for internal use only. */
SLAInvokedId: DevKit.WebApi.LookupValueReadonly;
/** Name of the applied SLA (read-only). */
SLAName: DevKit.WebApi.StringValueReadonly;
/** Contains the id of the stage where the entity is located. */
StageId: DevKit.WebApi.GuidValue;
/** Shows whether the invoice is active, paid, or canceled. Paid and canceled invoices are read-only and can't be edited unless they are reactivated. */
StateCode: DevKit.WebApi.OptionSetValue;
/** Select the invoice's status. */
StatusCode: DevKit.WebApi.OptionSetValue;
/** For internal use only. */
TimeZoneRuleVersionNumber: DevKit.WebApi.IntegerValue;
/** Shows the total amount due, calculated as the sum of the products, discount, freight, and taxes for the invoice. */
TotalAmount: DevKit.WebApi.MoneyValue;
/** Value of the Total Amount in base currency. */
TotalAmount_Base: DevKit.WebApi.MoneyValueReadonly;
/** Shows the total product amount due, minus any discounts. This value is added to freight and tax amounts in the calculation for the total amount due for the invoice. */
TotalAmountLessFreight: DevKit.WebApi.MoneyValue;
/** Value of the Total Pre-Freight Amount in base currency. */
TotalAmountLessFreight_Base: DevKit.WebApi.MoneyValueReadonly;
/** Shows the total discount amount, based on the discount price and rate entered on the invoice. */
TotalDiscountAmount: DevKit.WebApi.MoneyValue;
/** Value of the Total Discount Amount in base currency. */
TotalDiscountAmount_Base: DevKit.WebApi.MoneyValueReadonly;
/** Shows the sum of all existing and write-in products included on the invoice, based on the specified price list and quantities. */
TotalLineItemAmount: DevKit.WebApi.MoneyValue;
/** Value of the Total Detail Amount in base currency. */
TotalLineItemAmount_Base: DevKit.WebApi.MoneyValueReadonly;
/** Shows the Manual Discount amounts specified on all products included in the invoice. This value is reflected in the Detail Amount field on the invoice and is added to any discount amount or rate specified on the invoice. */
TotalLineItemDiscountAmount: DevKit.WebApi.MoneyValue;
/** Value of the Total Line Item Discount Amount in base currency. */
TotalLineItemDiscountAmount_Base: DevKit.WebApi.MoneyValueReadonly;
/** Shows the total of the Tax amounts specified on all products included in the invoice, included in the Total Amount due calculation for the invoice. */
TotalTax: DevKit.WebApi.MoneyValue;
/** Value of the Total Tax in base currency. */
TotalTax_Base: DevKit.WebApi.MoneyValueReadonly;
/** Choose the local currency for the record to make sure budgets are reported in the correct currency. */
TransactionCurrencyId: DevKit.WebApi.LookupValue;
/** A comma separated list of string values representing the unique identifiers of stages in a Business Process Flow Instance in the order that they occur. */
TraversedPath: DevKit.WebApi.StringValue;
/** Time zone code that was in use when the record was created. */
UTCConversionTimeZoneCode: DevKit.WebApi.IntegerValue;
/** Version Number */
VersionNumber: DevKit.WebApi.BigIntValueReadonly;
/** Select whether the products included in the invoice should be shipped to the specified address or held until the customer calls with further pick up or delivery instructions. */
WillCall: DevKit.WebApi.BooleanValue;
}
}
declare namespace OptionSet {
// Generated option-set (picklist) declarations; the numeric value of each
// member is recorded in the comment directly above it.
namespace Invoice {
/** Order type of the invoice. */
enum msdyn_OrderType {
/** 192350000 */
Item_based,
/** 690970002 */
Service_Maintenance_Based,
/** 192350001 */
Work_based
}
/** Project-specific invoice status. */
enum msdyn_projectinvoicestatus {
/** 192350002 */
Confirmed,
/** 192350000 */
Draft,
/** 192350001 */
In_Review,
/** 192350003 */
Invoice_Paid
}
/** Payment terms for the invoice. */
enum PaymentTermsCode {
/** 2 */
_2_10_Net_30,
/** 1 */
Net_30,
/** 3 */
Net_45,
/** 4 */
Net_60
}
/** Pricing error reported for the invoice. */
enum PricingErrorCode {
/** 36 */
Base_Currency_Attribute_Overflow,
/** 37 */
Base_Currency_Attribute_Underflow,
/** 1 */
Detail_Error,
/** 27 */
Discount_Type_Invalid_State,
/** 33 */
Inactive_Discount_Type,
/** 3 */
Inactive_Price_Level,
/** 20 */
Invalid_Current_Cost,
/** 28 */
Invalid_Discount,
/** 26 */
Invalid_Discount_Type,
/** 19 */
Invalid_Price,
/** 17 */
Invalid_Price_Level_Amount,
/** 34 */
Invalid_Price_Level_Currency,
/** 18 */
Invalid_Price_Level_Percentage,
/** 9 */
Invalid_Pricing_Code,
/** 30 */
Invalid_Pricing_Precision,
/** 7 */
Invalid_Product,
/** 29 */
Invalid_Quantity,
/** 24 */
Invalid_Rounding_Amount,
/** 23 */
Invalid_Rounding_Option,
/** 22 */
Invalid_Rounding_Policy,
/** 21 */
Invalid_Standard_Cost,
/** 15 */
Missing_Current_Cost,
/** 14 */
Missing_Price,
/** 2 */
Missing_Price_Level,
/** 12 */
Missing_Price_Level_Amount,
/** 13 */
Missing_Price_Level_Percentage,
/** 8 */
Missing_Pricing_Code,
/** 6 */
Missing_Product,
/** 31 */
Missing_Product_Default_UOM,
/** 32 */
Missing_Product_UOM_Schedule_,
/** 4 */
Missing_Quantity,
/** 16 */
Missing_Standard_Cost,
/** 5 */
Missing_Unit_Price,
/** 10 */
Missing_UOM,
/** 0 */
None,
/** 35 */
Price_Attribute_Out_Of_Range,
/** 25 */
Price_Calculation_Error,
/** 11 */
Product_Not_In_Price_Level,
/** 38 */
Transaction_currency_is_not_set_for_the_product_price_list_item
}
/** Priority of the invoice record. */
enum PriorityCode {
/** 1 */
Default_Value
}
/** Shipping method for deliveries. */
enum ShippingMethodCode {
/** 1 */
Airborne,
/** 2 */
DHL,
/** 3 */
FedEx,
/** 6 */
Full_Load,
/** 5 */
Postal_Mail,
/** 4 */
UPS,
/** 7 */
Will_Call
}
/** Freight terms for the Ship To address. */
enum ShipTo_FreightTermsCode {
/** 1 */
Default_Value
}
/** Internal flag controlling price recalculation. */
enum SkipPriceCalculation {
/** 0 */
DoPriceCalcAlways,
/** 1 */
SkipPriceCalcOnRetrieve
}
/** State (status) of the invoice record. */
enum StateCode {
/** 0 */
Active,
/** 3 */
Canceled,
/** 1 */
Closed_deprecated,
/** 2 */
Paid
}
/** Status reason of the invoice record. */
enum StatusCode {
/** 4 */
Billed,
/** 5 */
Booked_applies_to_services,
/** 100003 */
Canceled,
/** 3 */
Canceled_deprecated,
/** 100001 */
Complete,
/** 6 */
Installed_applies_to_services,
/** 1 */
New,
/** 7 */
Paid_in_Full_deprecated,
/** 100002 */
Partial,
/** 2 */
Partially_Shipped
}
/** Calculation state of a rollup attribute. */
enum RollupState {
/** 0 - Attribute value is yet to be calculated */
NotCalculated,
/** 1 - Attribute value has been calculated per the last update time in <AttributeSchemaName>_Date attribute */
Calculated,
/** 2 - Attribute value calculation lead to overflow error */
OverflowError,
/** 3 - Attribute value calculation failed due to an internal error, next run of calculation job will likely fix it */
OtherError,
/** 4 - Attribute value calculation failed because the maximum number of retry attempts to calculate the value were exceeded likely due to high number of concurrency and locking conflicts */
RetryLimitExceeded,
/** 5 - Attribute value calculation failed because maximum hierarchy depth limit for calculation was reached */
HierarchicalRecursionLimitReached,
/** 6 - Attribute value calculation failed because a recursive loop was detected in the hierarchy of the record */
LoopDetected
}
}
}
//{'JsForm':['Field Service Information','Information','Invoice','Project Invoice'],'JsWebApi':true,'IsDebugForm':true,'IsDebugWebApi':true,'Version':'2.12.31','JsFormVersion':'v2'}
import { Location, RouteActionType } from '../interface/common';
import { RouterDriver, RouteRecord, RouteDriverEventType } from '../interface/driver';
import { RouteManager, MatchedRoute } from '../interface/routeManager';
import {
BaseNavigationOptions,
NavigationOptions,
PopNavigationOptions,
RouteConfig,
RouteInfo,
Router,
RouterConfig,
RouterEventMap,
RouterOption,
preActionCallback,
RouteEventType
} from '../interface/router';
import { isNameLocation, isPathnameLocation, normalizePath } from '../utils/helpers';
import { parseToSearchStr } from '../utils/url';
import BaseEventEmitter from './EventEmitter';
import TreeRouteManager from './route/RouteManager';
// Route info as stored on the stack: everything in RouteInfo except the
// stack-position `index`, which is computed whenever the info is handed out.
type IRouteAndConfig<Component> = Omit<RouteInfo<Component>, 'index'>;
// Extra data forwarded to the driver alongside a navigation operation.
interface DriverPayload {
// Optional transition name passed through to the driver — presumably a
// UI animation hint; confirm against the driver implementation.
transition?: string;
}
/**
* StackRouter — stack-based Router implementation
*
* @export
* @class StackRouter
* @extends {BaseEventEmitter<RouterEventMap<Component>>}
* @implements {Router<Component>}
* @template Component
*/
export default class StackRouter<Component> extends BaseEventEmitter<RouterEventMap<Component>>
implements Router<Component> {
/**
 * Route info of the page on top of the stack, or undefined when the
 * stack is empty. The returned object is a fresh copy with `index` set
 * to the top-of-stack position.
 */
public get currentRouteInfo(): RouteInfo<Component> | undefined {
    const top = this.routeStack[this.routeStack.length - 1];
    if (!top) {
        return;
    }
    // Copy instead of mutating: the previous code used the stored stack
    // entry as the Object.assign target, permanently attaching `index`
    // to it and exposing internal state to callers. prepush/prereplace
    // already build a fresh object; this now matches them.
    return Object.assign({}, top, { index: this.routeStack.length - 1 });
}
// Matches paths/names against the registered route configs.
private routeManager: RouteManager<RouteConfig<Component>>;
// Live pages, bottom-first; the last element is the current route.
private routeStack: Array<IRouteAndConfig<Component>> = [];
// Driver performing the underlying history/navigation operations.
private driver: RouterDriver;
// Router configuration; `base` defaults to '/' and may be overridden by options.
private config: RouterConfig = { base: '/' };
/**
 * Build the router: merge the config, register every route, and wire
 * the driver listeners plus the initial route info.
 *
 * @param option router options (route table plus optional config overrides)
 * @param driver driver that performs the actual navigation operations
 * @param routeManager optional custom route manager; a TreeRouteManager is
 *        created when none is supplied
 */
constructor(
    option: RouterOption<Component>,
    driver: RouterDriver,
    routeManager?: RouteManager<RouteConfig<Component>>
) {
    super();
    this.driver = driver;
    this.routeManager = routeManager || new TreeRouteManager<RouteConfig<Component>>();
    Object.assign(this.config, option.config);
    for (const route of option.routes) {
        this.routeManager.register(route);
    }
    this.initDriverListener();
    this.initRouteInfo();
}
/**
 * Prepare a push: emit WILL_CHANGE for the would-be route and return a
 * callback that either cancels the navigation (call with `true`) or
 * commits it by performing the actual push.
 *
 * @template T
 * @param {Location<T>} location target location
 * @returns {preActionCallback} confirmation/cancellation callback
 * @memberof Router
 */
public prepush<T extends Partial<NavigationOptions>>(location: Location<T>): preActionCallback {
    const { path, state, transition } = this.getNormalizedLocation<T>(location);
    const id = this.driver.generateNextId();
    const matched = this.getRouteInfo(id, path, state);
    if (matched === undefined) {
        // Unknown path: release the reserved id and hand back a no-op.
        this.driver.deprecateNextId();
        return (): void => undefined;
    }
    // The pushed route would land on top of the current stack.
    const pendingRoute: RouteInfo<Component> = Object.assign({}, matched, { index: this.routeStack.length });
    this.emit(RouteEventType.WILL_CHANGE, RouteActionType.PUSH, pendingRoute, transition);
    return (cancel?: boolean): void => {
        if (!cancel) {
            this.push(location);
            return;
        }
        this.driver.deprecateNextId();
        this.emit(RouteEventType.CANCEL_CHANGE, pendingRoute);
    };
}
/**
 * Prepare a pop: emit WILL_CHANGE for the route that would become current
 * and return a callback that either cancels (call with `true`) or commits
 * the pop. `n` is clamped so at least one page stays on the stack.
 *
 * @template T
 * @param {T} [option] pop options (`n` pages to pop, optional transition)
 * @returns {preActionCallback} confirmation/cancellation callback
 * @memberof Router
 */
public prepop<T extends Partial<PopNavigationOptions>>(option?: T): preActionCallback {
    let { n = 1 } = option || {};
    if (this.routeStack.length > 1 && n > this.routeStack.length - 1) {
        n = this.routeStack.length - 1;
    }
    const index = this.routeStack.length - n - 1;
    if (index < 0) {
        return (): void => undefined;
    }
    // Copy the stored record instead of mutating it: Object.assign with the
    // stack entry as target leaked an `index` property into the stack and
    // handed listeners a reference to internal state — prepush/prereplace
    // already build a fresh object, so this now matches them.
    const nextRouteInfo: RouteInfo<Component> = Object.assign({}, this.routeStack[index], { index });
    this.emit(RouteEventType.WILL_CHANGE, RouteActionType.POP, nextRouteInfo, option && option.transition);
    return (cancel?: boolean): void => {
        if (cancel) {
            this.emit(RouteEventType.CANCEL_CHANGE, nextRouteInfo);
        } else {
            this.pop(option);
        }
    };
}
/**
 * Prepare a replace: emit WILL_CHANGE for the replacement route and return
 * a callback that either cancels the navigation (call with `true`) or
 * commits it by performing the actual replace.
 *
 * @template T
 * @param {Location<T>} location target location
 * @returns {preActionCallback} confirmation/cancellation callback
 * @memberof Router
 */
public prereplace<T extends Partial<NavigationOptions>>(location: Location<T>): preActionCallback {
    const { path, state, transition } = this.getNormalizedLocation<T>(location);
    const id = this.driver.generateNextId();
    const matched = this.getRouteInfo(id, path, state);
    if (matched === undefined) {
        // Unknown path: release the reserved id and hand back a no-op.
        this.driver.deprecateNextId();
        return (): void => undefined;
    }
    // A replace keeps the stack depth, so the route stays at the top index.
    const pendingRoute: RouteInfo<Component> = Object.assign({}, matched, { index: this.routeStack.length - 1 });
    this.emit(RouteEventType.WILL_CHANGE, RouteActionType.REPLACE, pendingRoute, transition);
    return (cancel?: boolean): void => {
        if (!cancel) {
            this.replace(location);
            return;
        }
        this.driver.deprecateNextId();
        this.emit(RouteEventType.CANCEL_CHANGE, pendingRoute);
    };
}
/**
 * Push a new page onto the top of the stack.
 *
 * @param {string | Location} location
 * @memberof Router
 */
public push<T extends Partial<NavigationOptions>>(location: Location<T>): void {
const normalized = this.getNormalizedLocation<T>(location);
const payload: DriverPayload = { transition: normalized.transition };
this.driver.push(normalized.path, normalized.state, payload);
}
/**
 * Pop pages off the top of the stack.
 *
 * @template T
 * @param {Partial<T>} [option] may carry `n` (number of pages, default 1)
 *   and a `transition` forwarded to the driver
 * @returns {void}
 * @memberof Router
 */
public pop<T extends Partial<PopNavigationOptions>>(option?: T): void {
const { n = 1, transition } = option || {};
const payload: DriverPayload = { transition };
this.driver.pop(n, payload);
}
/**
 * Replace the page on the top of the stack with a new one.
 *
 * @template T
 * @param {(Location<T>)} location
 * @memberof Router
 */
public replace<T extends Partial<NavigationOptions>>(location: Location<T>): void {
const { path, state, transition: transition } = this.getNormalizedLocation<T>(location);
const payload: DriverPayload = { transition };
this.driver.replace(path, state, payload);
}
/**
 * Pop all the way back to the first (bottom) route of the stack.
 *
 * @template T
 * @param {Partial<T>} [option]
 * @memberof Router
 */
public popToBottom<T extends BaseNavigationOptions>(option?: Partial<T>): void {
const count = this.routeStack.length - 1;
this.pop(Object.assign({ n: count }, option));
}
public registerRoutes(routes: Array<RouteConfig<Component>>): void {
routes.forEach(route => this.routeManager.register(route));
if (this.routeStack.length === 0) {
this.initRouteInfo();
}
}
// Resolves the driver's current route record into the initial stack entry.
// If the match went through a redirect, the driver's path is synced to the
// final (post-redirect) path.
private initRouteInfo(): void {
const { id, path, state } = this.driver.getCurrentRouteRecord();
const routeInfo = this.getRouteInfo(id, path, state);
if (routeInfo !== undefined) {
this.routeStack.push(routeInfo);
if (routeInfo.route.redirected) {
this.driver.changePath(routeInfo.route.path);
}
}
}
// Subscribes to the driver's CHANGE event so navigation originating outside
// the router (e.g. browser back/forward) is reflected in the route stack.
private initDriverListener(): void {
this.driver.on(RouteDriverEventType.CHANGE, (type: RouteActionType, routeRecord: RouteRecord, payload: unknown) =>
this.handleRouteChange(type, routeRecord, payload)
);
}
/**
 * Normalizes any accepted location form (plain path string, pathname-based
 * object, or name-based object) into a `{ path, state, transition }` triple.
 * The query object, when present, is serialized and appended to the path.
 */
private getNormalizedLocation<T extends Partial<NavigationOptions>>(
location: Location<T>
): {
path: string;
state: unknown;
transition: string | undefined;
} {
// A bare string carries no state and no transition.
if (typeof location === 'string') {
return {
path: normalizePath(location),
state: undefined,
transition: undefined
};
}
let pathname = '';
if (isPathnameLocation(location)) {
pathname = normalizePath(location.pathname);
}
if (isNameLocation(location)) {
pathname = this.routeManager.getPathnameByRouteName(location.name, location.params) || '';
}
const queryStr = location.query ? parseToSearchStr(location.query) : '';
return {
path: `${pathname}${queryStr}`,
state: location.state,
transition: location.transition
};
}
/**
 * Matches a path against the route table, following `redirect` configs until
 * a non-redirecting route is found or the redirect resolves back to the
 * original path. Returns the matched route plus the final path, or undefined
 * when nothing matches.
 *
 * NOTE(review): only redirects back to the ORIGINAL path break the loop;
 * a longer redirect cycle (A -> B -> C -> B) would spin forever — confirm
 * redirect configurations are acyclic.
 */
private matchRoute(
path: string
): MatchedRoute<RouteConfig<Component>> & { redirected: boolean; path: string } | undefined {
let currentPath = path;
let matchedRoute = this.routeManager.match(currentPath);
let redirected = false;
while (matchedRoute && matchedRoute.config.redirect !== undefined) {
const {
params,
pathname,
query,
config: { redirect }
} = matchedRoute;
// `redirect` may be a static location or a function of the matched route.
const newLocation = typeof redirect === 'function' ? redirect({ pathname, query, params }) : redirect;
const normalizedLocation = this.getNormalizedLocation(newLocation);
if (path === normalizedLocation.path) {
break;
}
matchedRoute = this.routeManager.match(normalizedLocation.path);
currentPath = normalizedLocation.path;
redirected = true;
}
if (matchedRoute) {
return Object.assign(matchedRoute, { redirected, path: currentPath });
}
// TODO(review): unmatched paths fail silently here; consider emitting a warning.
}
/**
 * Resolves a (id, path, state) triple into a route/config pair, or undefined
 * when the path does not match any registered route.
 */
private getRouteInfo(id: string, path: string, state?: unknown): IRouteAndConfig<Component> | undefined {
const matched = this.matchRoute(path);
if (matched === undefined) {
return;
}
const route = {
id,
name: matched.config.name || '',
path: matched.path,
pathname: matched.pathname,
query: matched.query,
params: matched.params,
state,
redirected: matched.redirected
};
return { route, config: matched.config };
}
// Notifies listeners that the active component changed for the given action.
private componentChange(type: RouteActionType, transitionOptions?: unknown): void {
this.emit(RouteEventType.CHANGE, type, this.currentRouteInfo, transitionOptions);
}
/**
 * Driver CHANGE handler: resolves the incoming route record, syncs the path
 * after redirects, and updates the route stack accordingly.
 */
private handleRouteChange(type: RouteActionType, routeRecord: RouteRecord, payload: unknown): void {
const { id, path, state } = routeRecord;
const routeInfo = this.getRouteInfo(id, path, state);
if (routeInfo === undefined) {
return;
}
if (routeInfo.route.redirected) {
this.driver.changePath(routeInfo.route.path);
}
// Only trust `transition` when the payload actually has the expected shape.
const transition = this.isDriverPayload(payload) ? payload.transition : undefined;
this.updateRouteRecords(type, routeInfo, transition);
}
private updateRouteRecords(type: RouteActionType, routeInfo: IRouteAndConfig<Component>, transition?: unknown): void {
switch (type) {
case RouteActionType.PUSH:
this.routeStack.push(routeInfo);
this.componentChange(type, transition);
break;
case RouteActionType.REPLACE:
const preRoute = this.routeStack.pop();
this.routeStack.push(routeInfo);
this.componentChange(type, transition);
if (preRoute) {
this.emit(RouteEventType.DESTROY, [preRoute.route.id]);
}
break;
case RouteActionType.POP:
const index = this.routeStack.findIndex(i => routeInfo.route.id === i.route.id);
if (index === -1) {
this.routeStack = [routeInfo];
this.componentChange(type, transition);
} else {
const destroyedIds = this.routeStack
.splice(index + 1, this.routeStack.length - index - 1)
.map(r => r.route.id);
this.componentChange(type, transition);
this.emit(RouteEventType.DESTROY, destroyedIds);
}
break;
default:
this.componentChange(type, transition);
}
}
/**
 * Type guard: true when `payload` is a non-null object carrying its own
 * `transition` key. Uses `Object.prototype.hasOwnProperty.call` so that
 * objects with a null prototype (`Object.create(null)`) do not throw, which
 * the previous `payload.hasOwnProperty(...)` call would.
 */
private isDriverPayload(payload: unknown): payload is DriverPayload {
return (
typeof payload === 'object' &&
payload !== null &&
Object.prototype.hasOwnProperty.call(payload, 'transition')
);
}
} | the_stack |
import { promises as fs } from 'fs'
import path from 'path'
import { Lockfile } from '@pnpm/lockfile-types'
import prepare, { preparePackages } from '@pnpm/prepare'
import readYamlFile from 'read-yaml-file'
import writeYamlFile from 'write-yaml-file'
import {
addDistTag,
execPnpm,
execPnpmSync,
} from '../utils'
// A local .pnpmfile.cjs readPackage hook can pin a transitive dependency
// (the dist-tag would otherwise pull 100.1.0).
test('readPackage hook', async () => {
const project = prepare()
await fs.writeFile('.pnpmfile.cjs', `
'use strict'
module.exports = {
hooks: {
readPackage (pkg) {
if (pkg.name === 'pkg-with-1-dep') {
pkg.dependencies['dep-of-pkg-with-1-dep'] = '100.0.0'
}
return pkg
}
}
}
`, 'utf8')
// w/o the hook, 100.1.0 would be installed
await addDistTag('dep-of-pkg-with-1-dep', '100.1.0', 'latest')
await execPnpm(['install', 'pkg-with-1-dep'])
await project.storeHas('dep-of-pkg-with-1-dep', '100.0.0')
})
// A readPackage hook that returns undefined must abort the install (exit 1).
test('readPackage hook makes installation fail if it does not return the modified package manifests', async () => {
prepare()
await fs.writeFile('.pnpmfile.cjs', `
'use strict'
module.exports = {
hooks: {
readPackage (pkg) {}
}
}
`, 'utf8')
const result = execPnpmSync(['install', 'pkg-with-1-dep'])
expect(result.status).toBe(1)
})
// The pnpmfile location can be overridden with the --pnpmfile CLI flag.
test('readPackage hook from custom location', async () => {
const project = prepare()
await fs.writeFile('pnpm.js', `
'use strict'
module.exports = {
hooks: {
readPackage (pkg) {
if (pkg.name === 'pkg-with-1-dep') {
pkg.dependencies['dep-of-pkg-with-1-dep'] = '100.0.0'
}
return pkg
}
}
}
`, 'utf8')
// w/o the hook, 100.1.0 would be installed
await addDistTag('dep-of-pkg-with-1-dep', '100.1.0', 'latest')
await execPnpm(['install', 'pkg-with-1-dep', '--pnpmfile', 'pnpm.js'])
await project.storeHas('dep-of-pkg-with-1-dep', '100.0.0')
})
// A pnpmfile outside the project can be supplied with --global-pnpmfile.
test('readPackage hook from global pnpmfile', async () => {
const project = prepare()
await fs.writeFile('../.pnpmfile.cjs', `
'use strict'
module.exports = {
hooks: {
readPackage (pkg) {
if (pkg.name === 'pkg-with-1-dep') {
pkg.dependencies['dep-of-pkg-with-1-dep'] = '100.0.0'
}
return pkg
}
}
}
`, 'utf8')
// w/o the hook, 100.1.0 would be installed
await addDistTag('dep-of-pkg-with-1-dep', '100.1.0', 'latest')
await execPnpm(['install', 'pkg-with-1-dep', '--global-pnpmfile', path.resolve('..', '.pnpmfile.cjs')])
await project.storeHas('dep-of-pkg-with-1-dep', '100.0.0')
})
// When both global and local pnpmfiles apply, both hooks run and the local
// one runs last (is-positive ends up at 1.0.0, not the global's 3.0.0).
test('readPackage hook from global pnpmfile and local pnpmfile', async () => {
const project = prepare()
await fs.writeFile('../.pnpmfile.cjs', `
'use strict'
module.exports = {
hooks: {
readPackage (pkg) {
if (pkg.name === 'pkg-with-1-dep') {
pkg.dependencies['dep-of-pkg-with-1-dep'] = '100.0.0'
pkg.dependencies['is-positive'] = '3.0.0'
}
return pkg
}
}
}
`, 'utf8')
await fs.writeFile('.pnpmfile.cjs', `
'use strict'
module.exports = {
hooks: {
readPackage (pkg) {
if (pkg.name === 'pkg-with-1-dep') {
pkg.dependencies['is-positive'] = '1.0.0'
}
return pkg
}
}
}
`, 'utf8')
// w/o the hook, 100.1.0 would be installed
await addDistTag('dep-of-pkg-with-1-dep', '100.1.0', 'latest')
await execPnpm(['install', 'pkg-with-1-dep', '--global-pnpmfile', path.resolve('..', '.pnpmfile.cjs')])
await project.storeHas('dep-of-pkg-with-1-dep', '100.0.0')
await project.storeHas('is-positive', '1.0.0')
})
// A pnpmfile at the workspace root is also applied to installs executed from
// inside a workspace package.
test('readPackage hook from pnpmfile at root of workspace', async () => {
const projects = preparePackages([
{
name: 'project-1',
version: '1.0.0',
dependencies: {
'is-positive': '1.0.0',
},
},
])
const pnpmfile = `
module.exports = { hooks: { readPackage } }
function readPackage (pkg) {
pkg.dependencies = pkg.dependencies || {}
pkg.dependencies['dep-of-pkg-with-1-dep'] = '100.1.0'
return pkg
}
`
await fs.writeFile('.pnpmfile.cjs', pnpmfile, 'utf8')
await writeYamlFile('pnpm-workspace.yaml', { packages: ['project-1'] })
const storeDir = path.resolve('store')
await execPnpm(['recursive', 'install', '--store-dir', storeDir])
process.chdir('project-1')
await execPnpm(['install', 'is-negative@1.0.0', '--store-dir', storeDir])
await projects['project-1'].has('is-negative')
await projects['project-1'].has('is-positive')
process.chdir('..')
// Both the recursive install and the later in-package install must have had
// the hook's injected dependency applied.
const lockfile = await readYamlFile<Lockfile>('pnpm-lock.yaml')
/* eslint-disable @typescript-eslint/no-unnecessary-type-assertion */
expect(lockfile.packages!['/is-positive/1.0.0'].dependencies).toStrictEqual({
'dep-of-pkg-with-1-dep': '100.1.0',
})
expect(lockfile.packages!['/is-negative/1.0.0'].dependencies).toStrictEqual({
'dep-of-pkg-with-1-dep': '100.1.0',
})
/* eslint-enable @typescript-eslint/no-unnecessary-type-assertion */
})
// The readPackage hook also applies when running `pnpm update`.
test('readPackage hook during update', async () => {
const project = prepare({
dependencies: {
'pkg-with-1-dep': '*',
},
})
await fs.writeFile('.pnpmfile.cjs', `
'use strict'
module.exports = {
hooks: {
readPackage (pkg) {
if (pkg.name === 'pkg-with-1-dep') {
pkg.dependencies['dep-of-pkg-with-1-dep'] = '100.0.0'
}
return pkg
}
}
}
`, 'utf8')
// w/o the hook, 100.1.0 would be installed
await addDistTag('dep-of-pkg-with-1-dep', '100.1.0', 'latest')
await execPnpm(['update'])
await project.storeHas('dep-of-pkg-with-1-dep', '100.0.0')
})
// A pnpmfile with a syntax error should surface a meaningful parse error on
// stderr ('/boom' parses as an unterminated regex literal) and fail.
test('prints meaningful error when there is syntax error in .pnpmfile.cjs', async () => {
prepare()
await fs.writeFile('.pnpmfile.cjs', '/boom', 'utf8')
const proc = execPnpmSync(['install', 'pkg-with-1-dep'])
expect(proc.stderr.toString()).toContain('SyntaxError: Invalid regular expression: missing /')
expect(proc.status).toBe(1)
})
// A pnpmfile that requires a missing module must fail with a clear error.
// (Title typo fixed: "non-existend" -> "non-existent"; the required path is
// intentionally nonexistent and is left untouched.)
test('fails when .pnpmfile.cjs requires a non-existent module', async () => {
  prepare()
  await fs.writeFile('.pnpmfile.cjs', 'module.exports = require("./this-does-node-exist")', 'utf8')
  const proc = execPnpmSync(['install', 'pkg-with-1-dep'])
  expect(proc.stdout.toString()).toContain('Error during pnpmfile execution')
  expect(proc.status).toBe(1)
})
// --ignore-pnpmfile must skip the hook, so the dist-tag version wins.
test('ignore .pnpmfile.cjs when --ignore-pnpmfile is used', async () => {
const project = prepare()
await fs.writeFile('.pnpmfile.cjs', `
'use strict'
module.exports = {
hooks: {
readPackage (pkg) {
if (pkg.name === 'pkg-with-1-dep') {
pkg.dependencies['dep-of-pkg-with-1-dep'] = '100.0.0'
}
return pkg
}
}
}
`, 'utf8')
await addDistTag('dep-of-pkg-with-1-dep', '100.1.0', 'latest')
await execPnpm(['install', 'pkg-with-1-dep', '--ignore-pnpmfile'])
await project.storeHas('dep-of-pkg-with-1-dep', '100.1.0')
})
// Same as above but for `pnpm update`.
test('ignore .pnpmfile.cjs during update when --ignore-pnpmfile is used', async () => {
const project = prepare({
dependencies: {
'pkg-with-1-dep': '*',
},
})
await fs.writeFile('.pnpmfile.cjs', `
'use strict'
module.exports = {
hooks: {
readPackage (pkg) {
if (pkg.name === 'pkg-with-1-dep') {
pkg.dependencies['dep-of-pkg-with-1-dep'] = '100.0.0'
}
return pkg
}
}
}
`, 'utf8')
await addDistTag('dep-of-pkg-with-1-dep', '100.1.0', 'latest')
await execPnpm(['update', '--ignore-pnpmfile'])
await project.storeHas('dep-of-pkg-with-1-dep', '100.1.0')
})
// The hook context exposes a log() that surfaces as a 'pnpm:hook' entry in
// the ndjson reporter output.
test('pnpmfile: pass log function to readPackage hook', async () => {
const project = prepare()
await fs.writeFile('.pnpmfile.cjs', `
'use strict'
module.exports = {
hooks: {
readPackage (pkg, context) {
if (pkg.name === 'pkg-with-1-dep') {
pkg.dependencies['dep-of-pkg-with-1-dep'] = '100.0.0'
context.log('dep-of-pkg-with-1-dep pinned to 100.0.0')
}
return pkg
}
}
}
`, 'utf8')
// w/o the hook, 100.1.0 would be installed
await addDistTag('dep-of-pkg-with-1-dep', '100.1.0', 'latest')
const proc = execPnpmSync(['install', 'pkg-with-1-dep', '--reporter', 'ndjson'])
await project.storeHas('dep-of-pkg-with-1-dep', '100.0.0')
const outputs = proc.stdout.toString().split(/\r?\n/)
const hookLog = outputs.filter(Boolean)
.map((output) => JSON.parse(output))
.find((log) => log.name === 'pnpm:hook')
expect(hookLog).toBeTruthy()
expect(hookLog.prefix).toBeTruthy()
expect(hookLog.from).toBeTruthy()
expect(hookLog.hook).toBe('readPackage')
expect(hookLog.message).toBe('dep-of-pkg-with-1-dep pinned to 100.0.0')
})
// Both global and local hooks log: same prefix (same project), different
// `from` (different pnpmfile paths), global hook's message first.
test('pnpmfile: pass log function to readPackage hook of global and local pnpmfile', async () => {
const project = prepare()
await fs.writeFile('../.pnpmfile.cjs', `
'use strict'
module.exports = {
hooks: {
readPackage (pkg, context) {
if (pkg.name === 'pkg-with-1-dep') {
pkg.dependencies['dep-of-pkg-with-1-dep'] = '100.0.0'
pkg.dependencies['is-positive'] = '3.0.0'
context.log('is-positive pinned to 3.0.0')
}
return pkg
}
}
}
`, 'utf8')
await fs.writeFile('.pnpmfile.cjs', `
'use strict'
module.exports = {
hooks: {
readPackage (pkg, context) {
if (pkg.name === 'pkg-with-1-dep') {
pkg.dependencies['is-positive'] = '1.0.0'
context.log('is-positive pinned to 1.0.0')
}
return pkg
}
}
}
`, 'utf8')
// w/o the hook, 100.1.0 would be installed
await addDistTag('dep-of-pkg-with-1-dep', '100.1.0', 'latest')
const proc = execPnpmSync(['install', 'pkg-with-1-dep', '--global-pnpmfile', path.resolve('..', '.pnpmfile.cjs'), '--reporter', 'ndjson'])
await project.storeHas('dep-of-pkg-with-1-dep', '100.0.0')
await project.storeHas('is-positive', '1.0.0')
const outputs = proc.stdout.toString().split(/\r?\n/)
const hookLogs = outputs.filter(Boolean)
.map((output) => JSON.parse(output))
.filter((log) => log.name === 'pnpm:hook')
expect(hookLogs[0]).toBeTruthy()
expect(hookLogs[0].prefix).toBeTruthy()
expect(hookLogs[0].from).toBeTruthy()
expect(hookLogs[0].hook).toBe('readPackage')
expect(hookLogs[0].message).toBe('is-positive pinned to 3.0.0')
expect(hookLogs[1]).toBeTruthy()
expect(hookLogs[1].prefix).toBeTruthy()
expect(hookLogs[1].from).toBeTruthy()
expect(hookLogs[1].hook).toBe('readPackage')
expect(hookLogs[1].message).toBe('is-positive pinned to 1.0.0')
expect(hookLogs[0].prefix).toBe(hookLogs[1].prefix)
expect(hookLogs[0].from).not.toBe(hookLogs[1].from)
})
// The afterAllResolved hook runs once resolution finishes and can log too.
test('pnpmfile: run afterAllResolved hook', async () => {
prepare()
await fs.writeFile('.pnpmfile.cjs', `
'use strict'
module.exports = {
hooks: {
afterAllResolved (lockfile, context) {
context.log('All resolved')
return lockfile
}
}
}
`, 'utf8')
const proc = execPnpmSync(['install', 'pkg-with-1-dep', '--reporter', 'ndjson'])
const outputs = proc.stdout.toString().split(/\r?\n/)
const hookLog = outputs.filter(Boolean)
.map((output) => JSON.parse(output))
.find((log) => log.name === 'pnpm:hook')
expect(hookLog).toBeTruthy()
expect(hookLog.prefix).toBeTruthy()
expect(hookLog.from).toBeTruthy()
expect(hookLog.hook).toBe('afterAllResolved')
expect(hookLog.message).toBe('All resolved')
})
// Hook-added deps on a manifest that lacked those sections must not crash —
// install succeeding is the assertion here.
test('readPackage hook normalizes the package manifest', async () => {
prepare()
await fs.writeFile('.pnpmfile.cjs', `
'use strict'
module.exports = {
hooks: {
readPackage (pkg) {
if (pkg.name === 'dep-of-pkg-with-1-dep') {
pkg.dependencies['is-positive'] = '*'
pkg.optionalDependencies['is-negative'] = '*'
pkg.peerDependencies['is-negative'] = '*'
pkg.devDependencies['is-positive'] = '*'
}
return pkg
}
}
}
`, 'utf8')
await execPnpm(['install', 'dep-of-pkg-with-1-dep'])
})
// The hook can rewrite the project's own manifest in memory: the dep is
// installed but package.json on disk stays untouched.
test('readPackage hook overrides project package', async () => {
const project = prepare({
name: 'test-read-package-hook',
})
await fs.writeFile('.pnpmfile.cjs', `
'use strict'
module.exports = {
hooks: {
readPackage (pkg) {
switch (pkg.name) {
case 'test-read-package-hook':
pkg.dependencies = { 'is-positive': '1.0.0' }
break
}
return pkg
}
}
}
`, 'utf8')
await execPnpm(['install'])
await project.has('is-positive')
const pkg = await import(path.resolve('package.json'))
expect(pkg.dependencies).toBeFalsy()
})
// Removal must re-run the hook so hook-added peer deps stay in the lockfile.
test('readPackage hook is used during removal inside a workspace', async () => {
preparePackages([
{
name: 'project',
version: '1.0.0',
dependencies: {
abc: '1.0.0',
'is-negative': '1.0.0',
'is-positive': '1.0.0',
'peer-a': '1.0.0',
},
},
])
// NOTE(review): the workspace lists 'project-1' but the package dir is
// 'project' — confirm this mismatch is intentional.
await writeYamlFile('pnpm-workspace.yaml', { packages: ['project-1'] })
await fs.writeFile('.pnpmfile.cjs', `
'use strict'
module.exports = {
hooks: {
readPackage (pkg) {
switch (pkg.name) {
case 'abc':
pkg.peerDependencies['is-negative'] = '1.0.0'
break
}
return pkg
}
}
}
`, 'utf8')
process.chdir('project')
await execPnpm(['install'])
await execPnpm(['uninstall', 'is-positive'])
process.chdir('..')
const lockfile = await readYamlFile<Lockfile>('pnpm-lock.yaml')
expect(lockfile.packages!['/abc/1.0.0_is-negative@1.0.0+peer-a@1.0.0'].peerDependencies!['is-negative']).toBe('1.0.0')
}) | the_stack
import {biRnd} from "./math.js";
export type Note = 'A' | 'A#' | 'B' | 'C' | 'C#' | 'D' | 'D#' | 'E' | 'F' | 'F#' | 'G' | 'G#';
export type Octave = '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8';
export type FullNote = `${Note}${Octave}`
// Chromatic index of each note name within an octave (C = 0 … B = 11),
// plus the reverse mapping for number -> name lookups.
const lookupTable: Map<Note, number> = new Map<Note, number>();
const revLook: Map<number, Note> = new Map<number, Note>();
(()=>{
function add(note: Note, n: number) {
lookupTable.set(note, n);
revLook.set(n, note);
}
add('A', 9);
add('A#', 10);
add('B', 11);
add('C', 0);
add('C#', 1);
add('D', 2);
add('D#', 3);
add('E', 4);
add('F', 5);
add('F#', 6);
add('G', 7);
add('G#', 8);
})();
/**
 * Converts a textual note such as 'A4' to its MIDI note number (A4 -> 69).
 * The trailing character is the octave; everything before it is the note name.
 */
export function textNoteToNumber(note: FullNote) {
const o: Octave = note.substring(note.length - 1) as Octave;
const n: Note = note.substring(0,note.length -1) as Note;
// @ts-ignore
return parseInt(o) * 12 + lookupTable.get(n) + 12;
}
// Equal-temperament conversion with the MIDI reference A4 = 69 = 440 Hz.
function midiNoteToFrequency(noteNumber: number) {
return 440 * Math.pow(2, (noteNumber - 69) / 12);
}
/**
 * Converts a MIDI note number back to its textual form. The octave is
 * floor(note / 12) - 1 so this function inverts textNoteToNumber (which
 * adds 12): previously the "- 1" was missing and every result came back one
 * octave too high (midiNoteToText(69) returned 'A5' instead of 'A4').
 */
export function midiNoteToText(note: number): FullNote {
const octave = Math.floor(note / 12) - 1;
const n = Math.floor(note % 12);
const noteName = revLook.get(n) as Note;
return `${noteName}${octave}` as FullNote;
}
/**
 * Returns the frequency in Hz for either a MIDI note number or a textual note.
 */
export function pitch(note: FullNote | number) {
if (typeof(note) === 'number') {
return midiNoteToFrequency(note);
} else {
return midiNoteToFrequency(textNoteToNumber(note));
}
}
// @ts-ignore
export function Audio(au: AudioContext = new (window.AudioContext || window.webkitAudioContext)()) {
// Master output chain: gain -> limiter (compressor) -> destination, with an
// analyser tapped off the limiter for visualisation.
function masterChannel() {
const gain = au.createGain();
gain.gain.value = 0.5;
const limiter = au.createDynamicsCompressor();
limiter.attack.value = 0.005;
limiter.release.value = 0.1;
limiter.ratio.value = 15.0;
limiter.knee.value = 0.0;
limiter.threshold.value = -0.5;
const analyser = au.createAnalyser();
analyser.fftSize = 2048;
limiter.connect(analyser);
gain.connect(limiter);
limiter.connect(au.destination);
return {
in: gain,
analyser
}
}
// ConstantSourceNode shim: on browsers without createConstantSource, emulate
// it with a looped all-ones buffer feeding a gain node whose gain parameter
// plays the role of `offset`.
function constantSourceCompatible(): AudioNode & {offset: AudioParam, start: () => void} {
if (au.createConstantSource) {
return au.createConstantSource();
} else {
const src = au.createBufferSource();
src.buffer = au.createBuffer(1, 256, au.sampleRate);
const array = src.buffer.getChannelData(0);
for (let i = 0; i < array.length; i++) {
array[i] = 1.0;
}
const gain = au.createGain();
const offsetParam = gain.gain;
src.loop = true;
src.connect(gain);
return Object.assign(gain, {offset: offsetParam, start: () => src.start()});
}
}
// Promise wrapper around the callback-style decodeAudioData (older browsers).
function decodeAudioDataCompatible(audioData: ArrayBuffer): Promise<AudioBuffer> {
return new Promise((resolve, reject) => {
return au.decodeAudioData(audioData, resolve, reject);
});
}
const master = masterChannel();
// Resolves after `s` seconds (setTimeout-based, so only roughly accurate).
function time(s: number) {
return new Promise<void>(resolve => {setTimeout(()=> resolve(), s * 1000)})
}
// Plays a single filtered sawtooth tone with a linear attack/release envelope.
// `pan` is in -1..1; the node chain is disconnected after the release ends.
async function tone(pitch: number, attack: number, sustain: number, release:number, pan: number = 0.0, destination: AudioNode = master.in) {
const osc = au.createOscillator();
osc.type = "sawtooth";
osc.frequency.value = pitch;
osc.start();
const filter = au.createBiquadFilter();
filter.type = "lowpass";
filter.frequency.value = pitch * 4;
filter.Q.value = 5;
const gain = au.createGain();
gain.gain.value = 0.0;
const panner = au.createPanner();
panner.panningModel = "equalpower";
panner.setPosition(pan, 0, 1-Math.abs(pan));
osc.connect(filter);
filter.connect(gain);
gain.connect(panner);
panner.connect(destination);
gain.gain.linearRampToValueAtTime(0.1, au.currentTime + attack);
await time(sustain + attack);
gain.gain.setValueAtTime(0.1, au.currentTime);
gain.gain.linearRampToValueAtTime(0,au.currentTime + release);
filter.frequency.linearRampToValueAtTime(Math.max(pitch/2, 400), au.currentTime + release);
await time(release + 0.01);
osc.stop(au.currentTime);
panner.disconnect();
}
// Fixed-envelope synth voice; each note is randomly panned via biRnd().
function SimpleToneSynth(attack: number, sustain: number, release:number, destination: AudioNode = master.in) {
function play(note: FullNote) {
tone(pitch(note), attack, sustain, release, biRnd(), destination);
}
return {
play
}
}
// Feedback delay insert: the dry signal passes straight from `in` to the
// destination; the wet path is delayed and fed back onto itself. Exposes the
// live AudioParams so feedback/wet/time can be automated.
function DelayInsert(time: number, feedback: number, wet: number, destination: AudioNode = master.in) {
const delayNode = au.createDelay(1);
delayNode.delayTime.value = time;
const feedbackGain = au.createGain();
feedbackGain.gain.value = feedback;
delayNode.connect(feedbackGain);
feedbackGain.connect(delayNode);
const delayGain = au.createGain();
delayGain.gain.value = wet;
delayNode.connect(delayGain);
delayGain.connect(destination);
const synthOut = au.createGain();
synthOut.gain.value = 1.0;
synthOut.connect(delayNode);
synthOut.connect(destination);
return {
in: synthOut,
feedback: feedbackGain.gain,
wet: delayGain.gain,
delayTime: delayNode.delayTime
}
}
// TB-303-style monosynth: oscillator -> resonant lowpass -> VCA, with a
// decaying envelope (constant source) modulating the filter detune.
function ThreeOh(type: OscillatorType = "sawtooth", out: AudioNode = master.in) {
const filter = au.createBiquadFilter();
filter.type = "lowpass";
filter.Q.value = 20;
filter.frequency.value = 300;
const pResonance = filter.Q;
const pCutoff = filter.frequency;
// Constant source used purely as an automatable "decay time" parameter.
const decayTimeNode = constantSourceCompatible();
decayTimeNode.start();
const pDecay = decayTimeNode.offset;
const env = constantSourceCompatible();
env.start();
env.offset.value = 0.0;
// Intentionally empty — NOTE(review): looks like a hook for trigger-time
// behaviour that was never filled in; confirm before removing.
function trigger() {
}
const scaleNode = au.createGain();
scaleNode.gain.value = 4000;
const pEnvMod = scaleNode.gain;
env.connect(scaleNode);
scaleNode.connect(filter.detune);
const osc = au.createOscillator();
osc.type = type;
osc.frequency.value = 440;
osc.start();
const vca = au.createGain();
vca.gain.value = 0.0;
osc.connect(vca);
vca.connect(filter);
filter.connect(out);
// Accented notes use a faster envelope decay and a louder VCA hit;
// glide slews the oscillator frequency instead of jumping.
function noteOn(note: FullNote, accent: boolean = false, glide: boolean = false) {
if (accent) {
env.offset.cancelScheduledValues(au.currentTime);
//env.offset.setTargetAtTime(1.0,au.currentTime, 0.001);
env.offset.setValueAtTime(1.0, au.currentTime);
env.offset.exponentialRampToValueAtTime(0.01, au.currentTime + pDecay.value/3);
} else {
env.offset.cancelScheduledValues(au.currentTime);
//env.offset.setTargetAtTime(1.0,au.currentTime, 0.001);
env.offset.setValueAtTime(1.0, au.currentTime);
env.offset.exponentialRampToValueAtTime(0.01, au.currentTime + pDecay.value);
}
osc.frequency.cancelScheduledValues(au.currentTime);
osc.frequency.setTargetAtTime(midiNoteToFrequency(textNoteToNumber(note)),au.currentTime, glide ? 0.02 : 0.002);
vca.gain.cancelScheduledValues(au.currentTime);
vca.gain.setValueAtTime(accent ? 0.2 : 0.15, au.currentTime);
//vca.gain.setTargetAtTime(accent ? 0.5 : 0.3,au.currentTime, 0.001);
//vca.gain.setValueAtTime(0.2, au.currentTime);
vca.gain.linearRampToValueAtTime(0.1, au.currentTime + 0.2);
trigger();
}
function noteOff() {
vca.gain.cancelScheduledValues(au.currentTime);
vca.gain.setTargetAtTime(0.0,au.currentTime,0.01);
}
return {
noteOn,
noteOff,
params: {
cutoff: pCutoff,
resonance: pResonance,
envMod: pEnvMod,
decay: pDecay
}
}
}
// One-shot kick drum: oscillator swept 400 Hz -> 50 Hz under a short gain
// curve; the chain tears itself down after 200 ms.
function kick(out: AudioNode = master.in) {
const osc = au.createOscillator();
osc.frequency.value = 400;
const gain = au.createGain();
gain.gain.value = 0.3;
osc.start();
osc.frequency.exponentialRampToValueAtTime(50, au.currentTime + 0.04);
gain.gain.setValueCurveAtTime([0.5,0.5,0.45,0.4,0.25,0.0], au.currentTime, 0.09);
osc.stop(au.currentTime + 0.1);
window.setTimeout(() => gain.disconnect(), 200);
osc.connect(gain);
gain.connect(out);
}
// Fetches an audio file and decodes it into an AudioBuffer.
async function loadBuffer(filePath: string) {
const response = await fetch(filePath);
const arraybuffer = await response.arrayBuffer();
const audioBuffer = await decodeAudioDataCompatible(arraybuffer);
return audioBuffer;
}
// One-shot sample player: each play() spins up a fresh buffer source with a
// linear decay envelope.
async function Sampler(file: string) {
const sampleBuffer = await loadBuffer(file);
function play(gain: number = 0.4, decay: number = 1.0, out: AudioNode = master.in) {
const bufferSource = au.createBufferSource();
bufferSource.buffer = sampleBuffer;
bufferSource.loop = false;
const gainNode = au.createGain();
gainNode.gain.setValueAtTime(gain,au.currentTime);
gainNode.gain.linearRampToValueAtTime(0.0, au.currentTime + decay);
bufferSource.connect(gainNode);
gainNode.connect(out);
bufferSource.start(au.currentTime);
}
return {
play
}
}
// Loads several samples in parallel and exposes one velocity-sensitive
// trigger per sample, all summed into a shared bus.
async function SamplerDrumMachine(files: string[], out: AudioNode = master.in) {
const sum = au.createGain();
sum.gain.value = 1.0;
sum.connect(out);
const promisedMachines = files.map(Sampler)
const samplers = await Promise.all(promisedMachines);
const mapped = samplers.map(sampler => ({
play: (vel: number) => sampler.play(0.7 * vel, vel * 0.5, sum)
}));
return {
triggers: mapped
}
}
return {
tone,
SimpleToneSynth,
DelayInsert,
ThreeOh,
kick,
Sampler,
SamplerDrumMachine,
master,
context: au
}
}
export type AudioT = ReturnType<typeof Audio> | the_stack |
import type { RegExpVisitor } from "regexpp/visitor"
import type { RegExpContext } from "../utils"
import { isEscapeSequence, createRule, defineRegexpVisitor } from "../utils"
import GraphemeSplitter from "grapheme-splitter"
import type { ReadonlyFlags } from "regexp-ast-analysis"
import { mention, mentionChar } from "../utils/mention"
import type {
CharacterClass,
CharacterClassElement,
Quantifier,
} from "regexpp/ast"
import type { PatternRange } from "../utils/ast-utils/pattern-source"
import type { Rule } from "eslint"
const splitter = new GraphemeSplitter()
/** Determines whether `s` begins with a complete UTF-16 surrogate pair. */
function startsWithSurrogate(s: string): boolean {
    if (s.length >= 2) {
        const high = s.charCodeAt(0)
        const low = s.charCodeAt(1)
        return (
            high >= 0xd800 && high <= 0xdbff && low >= 0xdc00 && low <= 0xdfff
        )
    }
    return false
}
type Problem = "Multi" | "Surrogate"

/**
 * Classifies a grapheme: "Multi" when it spans multiple code points,
 * "Surrogate" when it is a surrogate pair used without the `u` flag,
 * and `null` when the grapheme is unproblematic.
 */
function getProblem(grapheme: string, flags: ReadonlyFlags): Problem | null {
    const isPair = startsWithSurrogate(grapheme)
    if (grapheme.length > 2 || (grapheme.length === 2 && !isPair)) {
        return "Multi"
    }
    if (isPair && !flags.unicode) {
        return "Surrogate"
    }
    return null
}
/** Returns the last grapheme of the quantified element. */
function getGraphemeBeforeQuant(quant: Quantifier): string {
    const alt = quant.parent
    // find the start index of the first character left of
    // this quantifier
    let start = quant.start
    for (let i = alt.elements.indexOf(quant) - 1; i >= 0; i--) {
        const e = alt.elements[i]
        // Walk left over plain (non-escape) characters; they may combine with
        // the quantified character into a single grapheme cluster.
        if (e.type === "Character" && !isEscapeSequence(e.raw)) {
            start = e.start
        } else {
            break
        }
    }
    const before = alt.raw.slice(
        start - alt.start,
        quant.element.end - alt.start,
    )
    // The grapheme ending at the quantified element is the last one in the run.
    const graphemes = splitter.splitGraphemes(before)
    const grapheme = graphemes[graphemes.length - 1]
    return grapheme
}
interface GraphemeProblem {
    /** The offending grapheme cluster (may span several code points). */
    readonly grapheme: string
    /** Whether the grapheme is multi-code-point or a bare surrogate pair. */
    readonly problem: Problem
    /** Pattern index where the grapheme starts. */
    readonly start: number
    /** Pattern index just past the grapheme's end. */
    readonly end: number
    /** A sorted list of all unique elements that overlap with this grapheme */
    readonly elements: CharacterClassElement[]
}
/** Returns all grapheme problems in the given character class. */
function getGraphemeProblems(
    cc: CharacterClass,
    flags: ReadonlyFlags,
): GraphemeProblem[] {
    // Skip the leading "[" of the raw text (and "^" for negated classes);
    // the trailing "]" is dropped by the slice below.
    let offset = cc.negate ? 2 : 1
    const graphemes = splitter.splitGraphemes(cc.raw.slice(offset, -1))
    const problems: GraphemeProblem[] = []
    for (const grapheme of graphemes) {
        const problem = getProblem(grapheme, flags)
        if (problem !== null) {
            const start = offset + cc.start
            const end = start + grapheme.length
            problems.push({
                grapheme,
                problem,
                start,
                end,
                // All class elements whose spans overlap this grapheme.
                elements: cc.elements.filter(
                    (e) => e.start < end && e.end > start,
                ),
            })
        }
        offset += grapheme.length
    }
    return problems
}
/**
 * Returns a fix for the given problems (if possible): the problematic
 * graphemes are pulled out of the character class into alternatives
 * (`(?:grapheme|...|[rest])`). Returns null when no safe fix exists.
 */
function getGraphemeProblemsFix(
    problems: readonly GraphemeProblem[],
    cc: CharacterClass,
): string | null {
    if (cc.negate) {
        // we can't fix a negated character class
        return null
    }
    if (
        !problems.every(
            (p) =>
                p.start === p.elements[0].start &&
                p.end === p.elements[p.elements.length - 1].end,
        )
    ) {
        // the graphemes don't line up with character class elements
        return null
    }
    // The prefix of graphemes; longest first so alternation prefers the
    // longest match.
    const prefix = problems
        .map((p) => p.grapheme)
        .sort((a, b) => b.length - a.length)
        .join("|")
    // The rest of the character class, with the problem spans removed
    // (iterating backwards so earlier indices stay valid).
    let ccRaw = cc.raw
    for (let i = problems.length - 1; i >= 0; i--) {
        const { start, end } = problems[i]
        ccRaw = ccRaw.slice(0, start - cc.start) + ccRaw.slice(end - cc.start)
    }
    if (ccRaw.startsWith("[^")) {
        // Removing the first element may have left a literal "^" in front;
        // escape it so the class doesn't accidentally become negated.
        ccRaw = `[\\${ccRaw.slice(1)}`
    }
    let fix = prefix
    let singleAlternative = problems.length === 1
    if (ccRaw !== "[]") {
        fix += `|${ccRaw}`
        singleAlternative = false
    }
    if (singleAlternative && cc.parent.type === "Alternative") {
        return fix
    }
    if (cc.parent.type === "Alternative" && cc.parent.elements.length === 1) {
        // The character class is the only element of its alternative, so the
        // alternation can be inlined without a wrapping group.
        return fix
    }
    return `(?:${fix})`
}
// ESLint rule: flags multi-code-point graphemes (surrogate pairs, combining
// marks, emoji sequences) that behave surprisingly inside character classes
// or under quantifiers, and offers fixes/suggestions to rewrite them.
export default createRule("no-misleading-unicode-character", {
meta: {
docs: {
description:
"disallow multi-code-point characters in character classes and quantifiers",
category: "Possible Errors",
// TODO Switch to recommended in the major version.
// recommended: true,
recommended: false,
},
schema: [
{
type: "object",
properties: {
fixable: { type: "boolean" },
},
additionalProperties: false,
},
],
fixable: "code",
hasSuggestions: true,
messages: {
characterClass:
"The character(s) {{ graphemes }} are all represented using multiple {{ unit }}.{{ uFlag }}",
quantifierMulti:
"The character {{ grapheme }} is represented using multiple Unicode code points. The quantifier only applies to the last code point {{ last }} and not to the whole character.",
quantifierSurrogate:
"The character {{ grapheme }} is represented using a surrogate pair. The quantifier only applies to the tailing surrogate {{ last }} and not to the whole character.",
// suggestions
fixCharacterClass:
"Move the character(s) {{ graphemes }} outside the character class.",
fixQuantifier: "Wrap a group around {{ grapheme }}.",
},
type: "problem",
},
create(context) {
// When the `fixable` option is set, problems are auto-fixed; otherwise the
// same fix is offered as a suggestion instead.
const fixable = context.options[0]?.fixable ?? false
/** Wrap a fix as either an auto-fix or a suggestion, per the `fixable` option. */
function makeFix(
fix: Rule.ReportDescriptorOptionsBase["fix"],
messageId: string,
data?: Record<string, string>,
): Partial<Rule.ReportDescriptorOptions> {
if (fixable) {
return { fix }
}
return {
suggest: [{ messageId, data, fix }],
}
}
/**
* Create visitor
*/
function createVisitor(
regexpContext: RegExpContext,
): RegExpVisitor.Handlers {
const {
node,
patternSource,
flags,
getRegexpLocation,
fixReplaceNode,
} = regexpContext
return {
// Report graphemes inside a character class that span multiple
// code points / char codes.
onCharacterClassEnter(ccNode) {
const problems = getGraphemeProblems(ccNode, flags)
if (problems.length === 0) {
return
}
// Report location spans from the first to the last problem grapheme.
const range: PatternRange = {
start: problems[0].start,
end: problems[problems.length - 1].end,
}
const fix = getGraphemeProblemsFix(problems, ccNode)
const graphemes = problems
.map((p) => mention(p.grapheme))
.join(", ")
// Only hint at the `u` flag when every problem is a surrogate pair,
// since `u` resolves exactly that case.
const uFlag = problems.every(
(p) => p.problem === "Surrogate",
)
context.report({
node,
loc: getRegexpLocation(range),
messageId: "characterClass",
data: {
graphemes,
unit: flags.unicode ? "code points" : "char codes",
uFlag: uFlag ? " Use the `u` flag." : "",
},
...makeFix(
fixReplaceNode(ccNode, () => fix),
"fixCharacterClass",
{ graphemes },
),
})
},
// Report quantifiers whose single-character operand is actually the
// tail end of a multi-code-point grapheme.
onQuantifierEnter(qNode) {
if (qNode.element.type !== "Character") {
return
}
const grapheme = getGraphemeBeforeQuant(qNode)
const problem = getProblem(grapheme, flags)
if (problem === null) {
return
}
context.report({
node,
loc: getRegexpLocation(qNode),
messageId: `quantifier${problem}`,
data: {
grapheme: mention(grapheme),
last: mentionChar(qNode.element),
},
...makeFix(
// Wrap the whole grapheme in a non-capturing group so the
// quantifier applies to all of it.
(fixer) => {
const range = patternSource.getReplaceRange({
start: qNode.element.end - grapheme.length,
end: qNode.element.end,
})
if (!range) {
return null
}
return range.replace(fixer, `(?:${grapheme})`)
},
"fixQuantifier",
{ grapheme: mention(grapheme) },
),
})
},
}
}
return defineRegexpVisitor(context, {
createVisitor,
})
},
}) | the_stack |
import {ApiRequestBuilder, ApiResult, ApiVersion} from "helpers/api_request_builder";
import {SparkRoutes} from "helpers/spark_routes";
import _ from "lodash";
import Stream from "mithril/stream";
import {stringOrUndefined} from "models/compare/pipeline_instance_json";
import {MaterialModificationJSON} from "models/config_repos/serialization";
import {humanizedMaterialAttributeName, MaterialModification} from "models/config_repos/types";
import {Filter} from "models/maintenance_mode/material";
import {Origin, OriginJSON, OriginType} from "models/origin";
import {mapTypeToDisplayType} from "./types";
// Wire-format (snake_case) shapes for the materials API payloads below.
// Fields common to every material type.
interface BaseAttributesJSON {
name: string;
auto_update: boolean;
}
interface GitMaterialAttributesJSON extends BaseAttributesJSON {
url: string;
branch: string;
}
interface SvnMaterialAttributesJSON extends BaseAttributesJSON {
url: string;
check_externals: boolean;
}
interface HgMaterialAttributesJSON extends BaseAttributesJSON {
url: string;
branch: string;
}
interface P4MaterialAttributesJSON extends BaseAttributesJSON {
port: string;
use_tickets: boolean;
view: string;
}
interface TfsMaterialAttributesJSON extends BaseAttributesJSON {
url: string;
domain: string;
project_path: string;
}
interface PackageMaterialAttributesJSON extends BaseAttributesJSON {
ref: string;
package_name: string;
package_repo_name: string;
}
interface PluggableScmMaterialAttributesJSON extends BaseAttributesJSON {
ref: string;
scm_name: string;
origin: OriginJSON;
}
// Union of all material-specific attribute payloads; discriminated by the
// sibling `type` field on MaterialWithFingerprintJSON.
type MaterialAttributesJSON =
GitMaterialAttributesJSON
| SvnMaterialAttributesJSON
| HgMaterialAttributesJSON
| P4MaterialAttributesJSON
| TfsMaterialAttributesJSON
| PackageMaterialAttributesJSON
| PluggableScmMaterialAttributesJSON;
export interface MaterialWithFingerprintJSON {
type: string;
fingerprint: string;
attributes: MaterialAttributesJSON;
}
// A single server-side message (e.g. update failure) attached to a material.
interface MaterialMessageJSON {
level: string;
message: string;
description: string;
}
interface MaterialWithModificationJSON {
config: MaterialWithFingerprintJSON;
can_trigger_update: boolean;
material_update_in_progress: boolean;
material_update_start_time?: string;
modification: MaterialModificationJSON;
messages: MaterialMessageJSON[];
}
interface MaterialsJSON {
materials: MaterialWithModificationJSON[];
}
// A single message (error/warning) reported by the server for a material.
export class MaterialMessage {
constructor(public level: string,
public message: string,
public description: string) {
}
// Build a MaterialMessage from its wire-format JSON representation.
static fromJSON(data: MaterialMessageJSON): MaterialMessage {
const {level, message, description} = data;
return new MaterialMessage(level, message, description);
}
}
// Collection of MaterialMessage with level-based accessors.
export class MaterialMessages extends Array<MaterialMessage> {
constructor(...vals: MaterialMessage[]) {
super(...vals);
// Required so `instanceof MaterialMessages` works when transpiling
// Array subclasses to ES5.
Object.setPrototypeOf(this, Object.create(MaterialMessages.prototype));
}
static fromJSON(data: MaterialMessageJSON[]): MaterialMessages {
return new MaterialMessages(...data.map((a) => MaterialMessage.fromJSON(a)));
}
hasMessages = () => this.length > 0;
// Note: lodash filter returns a plain Array, not a MaterialMessages.
errors = () => _.filter(this, {level: "ERROR"});
warnings = () => _.filter(this, {level: "WARNING"});
}
// Base class for all material attribute models; holds the fields shared by
// every material type and dispatches JSON deserialization to the concrete
// subclass based on the material's `type` discriminator.
abstract class MaterialAttributes {
name: Stream<string | undefined>;
autoUpdate: Stream<boolean>;
protected constructor(name?: string, autoUpdate: boolean = false) {
this.name = Stream(name);
this.autoUpdate = Stream(autoUpdate);
}
/** Factory: picks the concrete attributes class from `material.type`. */
static deserialize(material: MaterialWithFingerprintJSON) {
switch (material.type) {
case "git":
return GitMaterialAttributes.fromJSON(material.attributes as GitMaterialAttributesJSON);
case "svn":
return SvnMaterialAttributes.fromJSON(material.attributes as SvnMaterialAttributesJSON);
case "hg":
return HgMaterialAttributes.fromJSON(material.attributes as HgMaterialAttributesJSON);
case "p4":
return P4MaterialAttributes.fromJSON(material.attributes as P4MaterialAttributesJSON);
case "tfs":
return TfsMaterialAttributes.fromJSON(material.attributes as TfsMaterialAttributesJSON);
case "package":
return PackageMaterialAttributes.fromJSON(material.attributes as PackageMaterialAttributesJSON);
case "plugin":
return PluggableScmMaterialAttributes.fromJSON(material.attributes as PluggableScmMaterialAttributesJSON);
default:
throw new Error(`Unknown material type ${material.type}`);
}
}
}
// Attributes of a git material: repository url plus the tracked branch.
export class GitMaterialAttributes extends MaterialAttributes {
url: Stream<string | undefined>;
branch: Stream<string | undefined>;
constructor(name?: string, autoUpdate?: boolean, url?: string, branch?: string) {
super(name, autoUpdate);
this.branch = Stream(branch);
this.url = Stream(url);
}
static fromJSON(json: GitMaterialAttributesJSON) {
const {name, auto_update, url, branch} = json;
return new GitMaterialAttributes(name, auto_update, url, branch);
}
}
// Attributes of a subversion material: url and whether externals are checked.
export class SvnMaterialAttributes extends MaterialAttributes {
url: Stream<string | undefined>;
checkExternals: Stream<boolean | undefined>;
constructor(name?: string, autoUpdate?: boolean, url?: string, checkExternals?: boolean) {
super(name, autoUpdate);
this.checkExternals = Stream(checkExternals);
this.url = Stream(url);
}
static fromJSON(json: SvnMaterialAttributesJSON) {
const {name, auto_update, url, check_externals} = json;
return new SvnMaterialAttributes(name, auto_update, url, check_externals);
}
}
// Attributes of a mercurial material: repository url and optional branch.
export class HgMaterialAttributes extends MaterialAttributes {
url: Stream<string | undefined>;
branch: Stream<string | undefined>;
constructor(name?: string, autoUpdate?: boolean, url?: string, branch?: string) {
super(name, autoUpdate);
this.branch = Stream(branch);
this.url = Stream(url);
}
static fromJSON(json: HgMaterialAttributesJSON) {
const {name, auto_update, url, branch} = json;
return new HgMaterialAttributes(name, auto_update, url, branch);
}
}
// Attributes of a perforce material: server port, ticket auth flag and view.
export class P4MaterialAttributes extends MaterialAttributes {
port: Stream<string | undefined>;
useTickets: Stream<boolean | undefined>;
view: Stream<string | undefined>;
constructor(name?: string, autoUpdate?: boolean, port?: string, useTickets?: boolean, view?: string) {
super(name, autoUpdate);
this.view = Stream(view);
this.useTickets = Stream(useTickets);
this.port = Stream(port);
}
static fromJSON(json: P4MaterialAttributesJSON) {
const {name, auto_update, port, use_tickets, view} = json;
return new P4MaterialAttributes(name, auto_update, port, use_tickets, view);
}
}
// Attributes of a TFS material: url, auth domain and project path.
export class TfsMaterialAttributes extends MaterialAttributes {
url: Stream<string | undefined>;
domain: Stream<string | undefined>;
projectPath: Stream<string | undefined>;
constructor(name?: string, autoUpdate?: boolean, url?: string, domain?: string, projectPath?: string) {
super(name, autoUpdate);
this.projectPath = Stream(projectPath);
this.domain = Stream(domain);
this.url = Stream(url);
}
static fromJSON(json: TfsMaterialAttributesJSON) {
const {name, auto_update, url, domain, project_path} = json;
return new TfsMaterialAttributes(name, auto_update, url, domain, project_path);
}
}
// Attributes of a package material: a ref plus the package/repo display names.
export class PackageMaterialAttributes extends MaterialAttributes {
ref: Stream<string | undefined>;
packageName: Stream<string | undefined>;
packageRepoName: Stream<string | undefined>;
constructor(name?: string, autoUpdate?: boolean, ref?: string, packageName?: string, packageRepoName?: string) {
super(name, autoUpdate);
this.packageRepoName = Stream(packageRepoName);
this.packageName = Stream(packageName);
this.ref = Stream(ref);
}
static fromJSON(data: PackageMaterialAttributesJSON): PackageMaterialAttributes {
const {name, auto_update, ref, package_name, package_repo_name} = data;
return new PackageMaterialAttributes(name, auto_update, ref, package_name, package_repo_name);
}
}
// Attributes of a plugin-backed SCM material; origin defaults to GoCD config.
export class PluggableScmMaterialAttributes extends MaterialAttributes {
ref: Stream<string>;
scmName: Stream<string>;
origin: Stream<Origin>;
constructor(name: string | undefined, autoUpdate: boolean | undefined, ref: string, scmName: string, origin: Origin = new Origin(OriginType.GoCD)) {
super(name, autoUpdate);
this.origin = Stream(origin);
this.scmName = Stream(scmName);
this.ref = Stream(ref);
}
static fromJSON(data: PluggableScmMaterialAttributesJSON): PluggableScmMaterialAttributes {
const {name, auto_update, ref, scm_name, origin} = data;
return new PluggableScmMaterialAttributes(name, auto_update, ref, scm_name, Origin.fromJSON(origin));
}
}
// A material's configuration plus its server-computed fingerprint, with
// helpers to render names/attributes for display.
export class MaterialWithFingerprint {
type: Stream<string>;
fingerprint: Stream<string>;
attributes: Stream<MaterialAttributes>;
constructor(type: string, fingerprint: string, attributes: MaterialAttributes) {
this.type = Stream(type);
this.fingerprint = Stream(fingerprint);
this.attributes = Stream(attributes);
}
static fromJSON(data: MaterialWithFingerprintJSON): MaterialWithFingerprint {
return new MaterialWithFingerprint(data.type, data.fingerprint, MaterialAttributes.deserialize(data));
}
/** Configured material name, or "" when unset. */
name(): string {
return this.attributes()!.name() || "";
}
typeForDisplay() {
return mapTypeToDisplayType[this.type()!];
}
/** Best human-readable identifier: explicit name, else a type-specific fallback. */
displayName() {
const name = this.name();
if (name.length > 0) {
return name;
}
if (this.type() === "package") {
const attrs = this.attributes() as PackageMaterialAttributes;
return `${attrs.packageRepoName()}_${attrs.packageName()}`;
}
if (this.type() === "plugin") {
return (this.attributes() as PluggableScmMaterialAttributes).scmName();
}
if (this.type() === "p4") {
return (this.attributes() as P4MaterialAttributes).port();
}
// Remaining types (git/hg/svn/tfs) all have a url attribute.
// @ts-ignore
return this.attributes()!.url();
}
/** One-line summary of the distinguishing attributes, per material type. */
attributesAsString() {
switch (this.type()) {
case "git":
// @ts-ignore
return `${this.attributes()!.url()} [ ${this.attributes()!.branch()} ]`;
case "hg":
const hgAttrs = this.attributes() as HgMaterialAttributes;
const branch = hgAttrs.branch() ? ` [ ${hgAttrs.branch()} ]` : "";
return `${hgAttrs.url()}${branch}`;
case "svn":
return (this.attributes() as SvnMaterialAttributes).url();
case "tfs":
return (this.attributes() as TfsMaterialAttributes).url();
case "p4":
const p4Attrs = this.attributes() as P4MaterialAttributes;
return `${p4Attrs.port()} [ ${p4Attrs.view()} ]`;
case "package":
const attrs = this.attributes() as PackageMaterialAttributes;
return `${attrs.packageRepoName()}_${attrs.packageName()}`;
case "plugin":
return (this.attributes() as PluggableScmMaterialAttributes).scmName();
}
return "";
}
/** Humanized attribute-name -> value map for the type-specific keys below. */
attributesAsMap(): Map<string, any> {
const map: Map<string, string> = new Map();
let keys: string[] = [];
switch (this.type()) {
case "git":
case "hg":
keys = ["url", "branch"];
break;
case "p4":
keys = ["port", "view"];
break;
case "tfs":
keys = ["url", "domain", "projectPath"];
break;
case "svn":
keys = ["url"];
break;
}
// Walk every own property of the attributes object, keeping only the
// whitelisted keys for this material type.
const reducer = (map: Map<any, any>, value: any, key: string) => {
if (keys.includes(key)) {
MaterialWithFingerprint.resolveKeyValueForAttribute(map, value, key);
}
return map;
};
_.reduce(this.attributes(), reducer, map);
return map;
}
// Unwraps streams/encrypted values into display form and stores them under
// a humanized key. Skips internal ("__"-prefixed) and "name" properties.
private static resolveKeyValueForAttribute(accumulator: Map<string, string>, value: any, key: string) {
if (key.startsWith("__") || ["name"].includes(key)) {
return accumulator;
}
let renderedValue = value;
const renderedKey = humanizedMaterialAttributeName(key);
// test for value being a stream
if (_.isFunction(value)) {
value = value();
}
// test for value being an EncryptedPassword
if (value && value.valueForDisplay) {
renderedValue = value.valueForDisplay();
}
renderedValue = _.isFunction(renderedValue) ? renderedValue() : renderedValue;
if (key === "filter" && renderedValue) {
renderedValue = (renderedValue as Filter).ignore();
}
accumulator.set(renderedKey, renderedValue);
return accumulator;
}
}
// Pairs a material's config with its latest modification and the server's
// current update state for it.
export class MaterialWithModification {
config: MaterialWithFingerprint;
canTriggerUpdate: boolean;
materialUpdateInProgress: boolean;
materialUpdateStartTime: stringOrUndefined;
modification: MaterialModification | null;
messages: MaterialMessages;
constructor(config: MaterialWithFingerprint, canTriggerUpdate: boolean = false, materialUpdateInProgress: boolean = false, materialUpdateStartTime?: stringOrUndefined, modification: MaterialModification | null = null, messages: MaterialMessages = new MaterialMessages()) {
this.config = config;
this.canTriggerUpdate = canTriggerUpdate;
this.materialUpdateInProgress = materialUpdateInProgress;
this.materialUpdateStartTime = materialUpdateStartTime;
this.modification = modification;
this.messages = messages;
}
static fromJSON(data: MaterialWithModificationJSON): MaterialWithModification {
const mod = data.modification === null ? null : MaterialModification.fromJSON(data.modification);
return new MaterialWithModification(MaterialWithFingerprint.fromJSON(data.config), data.can_trigger_update
, data.material_update_in_progress, data.material_update_start_time, mod, MaterialMessages.fromJSON(data.messages));
}
/**
* Case-insensitive substring search over the material's type, name,
* rendered attributes and (when present) latest modification metadata.
* An empty query matches everything.
*/
matches(query: string) {
if (!query) {
return true;
}
// Normalize the needle once instead of re-computing it for every field.
const needle = query.trim().toLowerCase();
const searchableStrings = [
this.config.type(),
this.config.name(),
this.config.attributesAsString()
];
const modification = this.modification;
if (modification !== null) {
searchableStrings.push(modification.username, modification.revision, modification.comment);
}
return searchableStrings.some((value) => value ? value.toLowerCase().includes(needle) : false);
}
type() {
return this.config.type();
}
}
// Collection of MaterialWithModification with JSON hydration and sorting.
export class Materials extends Array<MaterialWithModification> {
constructor(...vals: MaterialWithModification[]) {
super(...vals);
// Required so `instanceof Materials` works when transpiling Array
// subclasses to ES5.
Object.setPrototypeOf(this, Object.create(Materials.prototype));
}
static fromJSON(data: MaterialWithModificationJSON[]): Materials {
return new Materials(...data.map((a) => MaterialWithModification.fromJSON(a)));
}
/** In-place, locale-aware sort by material type. */
sortOnType() {
this.sort((m1, m2) => m1.type()!.localeCompare(m2.type()!));
}
}
// Hypermedia link as returned by the modifications endpoint.
interface LinkJSON {
href: string;
}
// Paginated list of modifications; _links carries next/previous page hrefs.
interface ModificationsJSON {
_links?: {
next?: LinkJSON;
previous?: LinkJSON;
};
modifications: MaterialModificationJSON[];
}
// A page of modifications plus the hrefs for adjacent pages, when provided.
export class MaterialModifications extends Array<MaterialModification> {
// href of the next page of records, if the server provided one
nextLink: stringOrUndefined;
// href of the previous page of records, if the server provided one
previousLink: stringOrUndefined;
constructor(...vals: MaterialModification[]) {
super(...vals);
// Required so `instanceof MaterialModifications` works when transpiling
// Array subclasses to ES5.
Object.setPrototypeOf(this, Object.create(MaterialModifications.prototype));
}
static fromJSON(data: ModificationsJSON): MaterialModifications {
const mods = new MaterialModifications(...data.modifications.map((a) => MaterialModification.fromJSON(a)));
// Optional chaining also tolerates `null` links in the payload, which the
// previous strict `=== undefined` checks would have crashed on.
mods.nextLink = data._links?.next?.href;
mods.previousLink = data._links?.previous?.href;
return mods;
}
}
// Thin wrappers over the materials HTTP endpoints.
export class MaterialAPIs {
private static API_VERSION_HEADER = ApiVersion.latest;
/** Fetch all materials; `etag` enables conditional (304) requests. */
static all(etag?: string) {
return ApiRequestBuilder.GET(SparkRoutes.getAllMaterials(), this.API_VERSION_HEADER, {etag})
.then((result: ApiResult<string>) => result.map((body) => {
const data = JSON.parse(body) as MaterialsJSON;
return Materials.fromJSON(data.materials);
}));
}
/*
* Link is the href provided in the first response which can be used to get the next/previous list of records
*/
static modifications(fingerprint: string, searchPattern: string, link?: string) {
const url = link ? link : SparkRoutes.getModifications(fingerprint, searchPattern);
return ApiRequestBuilder.GET(url, this.API_VERSION_HEADER)
.then((result: ApiResult<string>) => result.map((body) => {
const parse = JSON.parse(body) as ModificationsJSON;
return MaterialModifications.fromJSON(parse);
}));
}
/** List the pipelines/configs that use the material with this fingerprint. */
static usages(fingerprint: string) {
return ApiRequestBuilder.GET(SparkRoutes.getMaterialUsages(fingerprint), this.API_VERSION_HEADER)
.then((result: ApiResult<string>) => result.map((body) => {
const parse = JSON.parse(body) as MaterialUsagesJSON;
return MaterialUsages.fromJSON(parse);
}));
}
/** Ask the server to poll this material for new modifications. */
static triggerUpdate(fingerprint: string) {
return ApiRequestBuilder.POST(SparkRoutes.getMaterialTriggerPath(fingerprint), this.API_VERSION_HEADER);
}
}
// Wire format for the material usages endpoint.
interface MaterialUsagesJSON {
usages: string[];
}
// List of pipeline names that use a given material.
export class MaterialUsages extends Array<string> {
constructor(...vals: string[]) {
super(...vals);
// Required so `instanceof MaterialUsages` works when transpiling Array
// subclasses to ES5.
Object.setPrototypeOf(this, Object.create(MaterialUsages.prototype));
}
static fromJSON(data: MaterialUsagesJSON): MaterialUsages {
return new MaterialUsages(...data.usages);
}
} | the_stack |
// Filesystem abstraction (fs-jetpack style API) with sync and async variants.
export interface IFilesystem {
/**
* Convenience property for `os.EOL`.
*/
eol: string
/**
* Convenience property for `path.sep`.
*/
separator: string
/**
* Convenience property for `os.homedir` function
*/
homedir: () => string
/**
* Appends given data to the end of file. If file or any parent directory doesn't exist it will be created.
*
* @param path The path to the file.
* @param data The data to append.
* @param options Additional options.
*/
append: (path: string, data: string | Buffer, options?: IFilesystemAppendOptions) => void
/**
* Appends given data to the end of file. If file or any parent directory doesn't exist it will be created.
*
* @param path The path to the file.
* @param data The data to append.
* @param options Additional options.
*/
appendAsync: (path: string, data: string | Buffer, options?: IFilesystemAppendOptions) => Promise<void>
/** Copies given file or directory (with everything inside). */
copy: (from: string, to: string, options?: IFilesystemCopyOptions) => void
/** Copies given file or directory (with everything inside). */
copyAsync: (from: string, to: string, options?: IFilesystemCopyOptions) => Promise<void>
/** See Node's fs.createReadStream. */
createReadStream: (path: string | Buffer | URL, options: any) => any
/** See Node's fs.createWriteStream. */
createWriteStream: (path: string | Buffer | URL, options: any) => any
/** Returns Current Working Directory (CWD) for this instance of jetpack, or creates new jetpack object with given path as its internal CWD. */
cwd: (...strings) => any
/**
* Ensures that directory on given path exists and meets given criteria.
*
* If any criterion is not met it will be enforced by this call. If any parent directory in path doesn't exist it will be created (like mkdir -p).
*/
dir: (path: string, options?: IFilesystemDirOptions) => any
/**
* Ensures that directory on given path exists and meets given criteria.
*
* If any criterion is not met it will be enforced by this call. If any parent directory in path doesn't exist it will be created (like mkdir -p).
*/
dirAsync: (path: string, options?: IFilesystemDirOptions) => Promise<any>
/**
* Checks whether something exists on given path.
*/
exists: (path: string) => false | 'dir' | 'file' | 'other'
/**
* Checks whether something exists on given path.
*/
existsAsync: (path: string) => Promise<false | 'dir' | 'file' | 'other'>
/**
* Ensures that file exists and meets given criteria.
*
* If any criterion is not met it will be enforced by this call. If any parent directory in path doesn't exist it will be created (like mkdir -p).
*/
file: (path: string, options: IFilesystemFileOptions) => any
/**
* Ensures that file exists and meets given criteria.
*
* If any criterion is not met it will be enforced by this call. If any parent directory in path doesn't exist it will be created (like mkdir -p).
*
* NOTE(review): return type is `any`, not `Promise<any>` like the other
* *Async members — looks like an oversight; confirm against the implementation.
*/
fileAsync: (path: string, options: IFilesystemFileOptions) => any
/**
* Finds in directory specified by path all files fulfilling searchOptions.
*/
find(path: string, options?: IFilesystemFindOptions): string[]
find(options: IFilesystemFindOptions): string[]
/**
* Finds in directory specified by path all files fulfilling searchOptions.
*/
findAsync(path: string, options?: IFilesystemFindOptions): Promise<string[]>
findAsync(options: IFilesystemFindOptions): Promise<string[]>
/**
* Returns details about a fs object found at a path.
*/
inspect(path: string, options?: IFileSystemInspectOptions): IFileSystemInspectResult[] | void
/**
* Returns details about a fs object found at a path.
*/
inspectAsync(path: string, options?: IFileSystemInspectOptions): Promise<IFileSystemInspectResult[] | void>
/**
* Returns details about a fs tree found at the path.
*/
inspectTree(path: string, options?: IFileSystemInspectTreeOptions): IFileSystemInspectTreeResult | void
/**
* Returns details about a fs tree found at the path.
*/
inspectTreeAsync(path: string, options?: IFileSystemInspectTreeOptions): Promise<IFileSystemInspectTreeResult | void>
/**
* Lists the contents of directory.
*/
list(path?: string): string[] | void
/**
* Lists the contents of directory.
*/
listAsync(path?: string): Promise<string[] | void>
/**
* Moves given path to a new location.
*/
move(from: string, to: string): void
/**
* Moves given path to a new location.
*
* NOTE(review): declared as `void`, not `Promise<void>` like renameAsync /
* removeAsync — likely a typo in the typings; confirm before relying on it.
*/
moveAsync(from: string, to: string): void
/**
* Resolves an absolute path.
*/
path(...strings): any
/**
* Reads the contents of a file.
*
* @param path The path to read from.
* @param options Additional options.
*/
read(path: string, options?: IFilesystemReadType): string | Buffer | any
/**
* Reads the contents of a file.
*
* @param path The path to read from.
* @param options Additional options.
*/
readAsync(path: string, options?: IFilesystemReadType): Promise<string | Buffer | any>
/**
* Deletes the path or file. It may or may not exist.
*/
remove(path?: string): void
/**
* Deletes the path or file. It may or may not exist.
*/
removeAsync(path?: string): Promise<void>
/**
* Renames a file or directory. The `to` is just the file name as this only renames in the same directory.
*/
rename(from: string, to: string): void
/**
* Renames a file or directory. The `to` is just the file name as this only renames in the same directory.
*/
renameAsync(from: string, to: string): Promise<void>
/**
* Creates a symlink.
*/
symlink(from: string, to: string): void
/**
* Creates a symlink.
*/
symlinkAsync(from: string, to: string): Promise<void>
/**
* Writes the contents of a file.
*/
write(path: string, data: any, options?: IFilesystemWriteOptions): void
/**
* Writes the contents of a file.
*/
writeAsync(path: string, data: any, options?: IFilesystemWriteOptions): Promise<void>
/**
* Retrieves a list of subdirectories for a given path.
*/
subdirectories(path: string, isRelative?: boolean, matching?: string, symlinks?: boolean): string[]
/**
* Is this a file?
*/
isFile(path: string): boolean
/**
* Is this not a file?
*/
isNotFile(path: string): boolean
/**
* Is this a directory?
*/
isDirectory(path: string): boolean
/**
* Is this not a directory?
*/
isNotDirectory(path: string): boolean
/**
* Is this not a local path?
*/
isLocalPath(path: string): boolean
/**
* Get the absolute path
*/
getAbsolutePath(path: string): string
}
// Options for append/appendAsync.
export interface IFilesystemAppendOptions {
/** If the file doesn't exist yet, will be created with given mode. Value could be number (eg. 0o700) or string (eg. '700'). */
mode?: string | number
}
/**
 * Used for resolving conflicts when performing copy operations.
 *
 * @param srcData The source inspect data.
 * @param destData The destination inspect data.
 * @returns Should we overwrite?
 */
export type IFilesystemCopyResolveHandler = (srcData: any, destData: any) => boolean
// Options for copy/copyAsync.
export interface IFilesystemCopyOptions {
/** Should we overwrite? */
overwrite?: boolean | IFilesystemCopyResolveHandler
/** A glob to filter down which files to copy. */
matching?: string
}
// Options for dir/dirAsync.
export interface IFilesystemDirOptions {
/**
* Whether directory should be empty (no other files or directories inside). If set to true and directory contains any files or subdirectories all of them will be deleted.
*/
empty?: boolean
/**
* Ensures directory has specified mode. If not set and directory already exists, current mode will be preserved. Value could be number (eg. 0o700) or string (eg. '700').
*/
mode?: string | number
}
// Options for file/fileAsync.
// All fields are optional: their own docs describe default/unset behavior,
// so the previously-required `jsonIndent`/`mode` were an inconsistency with
// the sibling options interfaces (making them optional is a widening,
// backward-compatible change).
export interface IFilesystemFileOptions {
/**
* Sets file content. If Object or Array given to this parameter data will be written as JSON.
*/
content?: any
/**
* If writing JSON data this tells how many spaces should one indentation have. default: 2
*/
jsonIndent?: number
/**
* Ensures file has specified mode. If not set and file already exists, current mode will be preserved. Value could be number (eg. 0o700) or string (eg. '700').
*/
mode?: string | number
}
// Options for find/findAsync.
export interface IFilesystemFindOptions {
/** An additional glob pattern to filter files. */
matching?: string
/** Should we find files? default: true */
files?: boolean
/** Should we find directories?: default: false */
directories?: boolean
/** Should we be recursive?: default: true */
recursive?: boolean
/** Should we report symlinks?: default false */
symlinks?: boolean
}
// Extra members this extension adds on top of the base filesystem API.
export interface IFilesystemExtensionExtra {
/**
* The end-of-line character which changes per-platform.
*/
eol: string
/**
* The fs separator character.
*/
separator: string
/**
* Find immediate subdirectories under a directory.
*/
subdirectories: (path: string) => string[]
}
// Options for inspect/inspectAsync.
export interface IFileSystemInspectOptions {
/** Performs a checksum on the file. Ignore directories. */
checksum?: 'md5' | 'sha1' | 'sha256' | 'sha512'
/** Adds unix file permissions. default: false */
mode?: boolean
/** Adds access, modified, and change times. default: false */
times?: boolean
/** Adds an absolute path. default: false */
absolutePath?: boolean
/** Report or follow symlinks. default: false */
symlinks?: 'report' | 'follow'
}
// Result shape produced by inspect/inspectAsync.
export interface IFileSystemInspectResult {
/** The filename */
name: string
/** The type of resource. */
type: 'file' | 'dir' | 'symlink'
/** The size in bytes. */
size: number
/** The md5 if the checksum was set to `md5` */
md5?: string
/** The sha1 if the checksum was set to `sha1` */
sha1?: string
/** The sha256 if the checksum was set to `sha256` */
sha256?: string
/** The sha512 if the checksum was set to `sha512` */
sha512?: string
/** The unix permission mode */
mode?: number
/** Last accessed */
accessTime?: Date
/** Last modified */
modifyTime?: Date
/** Change time */
changeTime?: Date
}
// Options for inspectTree/inspectTreeAsync.
export interface IFileSystemInspectTreeOptions {
/** Performs a checksum on the file. Ignore directories. */
checksum?: 'md5' | 'sha1' | 'sha256' | 'sha512'
/** Adds a relative path to each entry. default: false */
relativePath?: boolean
/** Report or follow symlinks. default: false */
symlinks?: 'report' | 'follow'
}
// Result shape produced by inspectTree/inspectTreeAsync; recursive via `children`.
export interface IFileSystemInspectTreeResult {
/** The filename */
name: string
/** The type of resource. */
type: 'file' | 'dir' | 'symlink'
/** The size in bytes. */
size: number
/** The relative path from the inspected directory. */
relativePath?: string
/** The md5 if the checksum was set to `md5` */
md5?: string
/** The sha1 if the checksum was set to `sha1` */
sha1?: string
/** The sha256 if the checksum was set to `sha256` */
sha256?: string
/** The sha512 if the checksum was set to `sha512` */
sha512?: string
/** Children results. Fixed: was mistakenly typed as the *options* interface; a tree node's children are tree nodes. */
children?: IFileSystemInspectTreeResult[]
}
// How read/readAsync should interpret the file contents.
export type IFilesystemReadType =
/** The utf-8 string (default) */
| 'utf8'
/** A Node Buffer */
| 'buffer'
/** An object converted from JSON. */
| 'json'
/** An object converted from JSON but with ISO dates converted. */
| 'jsonWithDates'
// Options for write/writeAsync.
export interface IFilesystemWriteOptions {
/**
* A safer and slower way to write the file to disk. Default: false.
*/
atomic?: boolean
/**
* If this is JSON, this will control the amount of indentation. Default: 2
*/
jsonIndent?: number
}
// from https://github.com/Microsoft/TypeScript/blob/master/src/lib/dom.generated.d.ts#L12209-L12223
// added manually so we don't have to import typescript's dom typings
export interface URL {
hash: string
host: string
hostname: string
href: string
readonly origin: string
password: string
pathname: string
port: string
protocol: string
search: string
username: string
readonly searchParams: any
toString(): string
} | the_stack |
/*
https://github.com/locutusjs/locutus/blob/master/src/php/datetime/strtotime.js
*/
// Regex fragments (as strings) composed into the strtotime grammar.
// Whitespace
const reSpace = '[ \\t]+'
const reSpaceOpt = '[ \\t]*'
// Time-of-day components
const reMeridian = '(?:([ap])\\.?m\\.?([\\t ]|$))'
const reHour24 = '(2[0-4]|[01]?[0-9])'
const reHour24lz = '([01][0-9]|2[0-4])'
const reHour12 = '(0?[1-9]|1[0-2])'
const reMinute = '([0-5]?[0-9])'
const reMinutelz = '([0-5][0-9])'
// 60 is allowed to accommodate leap seconds
const reSecond = '(60|[0-5]?[0-9])'
const reSecondlz = '(60|[0-5][0-9])'
const reFrac = '(?:\\.([0-9]+))'
// Day-of-week words
const reDayfull = 'sunday|monday|tuesday|wednesday|thursday|friday|saturday'
const reDayabbr = 'sun|mon|tue|wed|thu|fri|sat'
const reDaytext = reDayfull + '|' + reDayabbr + '|weekdays?'
// Relative expressions ("next monday", "third tuesday", ...)
const reReltextnumber = 'first|second|third|fourth|fifth|sixth|seventh|eighth?|ninth|tenth|eleventh|twelfth'
const reReltexttext = 'next|last|previous|this'
const reReltextunit = '(?:second|sec|minute|min|hour|day|fortnight|forthnight|month|year)s?|weeks|' + reDaytext
// Date components (years, months incl. roman numerals, days)
const reYear = '([0-9]{1,4})'
const reYear2 = '([0-9]{2})'
const reYear4 = '([0-9]{4})'
const reYear4withSign = '([+-]?[0-9]{4})'
const reMonth = '(1[0-2]|0?[0-9])'
const reMonthlz = '(0[0-9]|1[0-2])'
const reDay = '(?:(3[01]|[0-2]?[0-9])(?:st|nd|rd|th)?)'
const reDaylz = '(0[0-9]|[1-2][0-9]|3[01])'
const reMonthFull = 'january|february|march|april|may|june|july|august|september|october|november|december'
const reMonthAbbr = 'jan|feb|mar|apr|may|jun|jul|aug|sept?|oct|nov|dec'
const reMonthroman = 'i[vx]|vi{0,3}|xi{0,2}|i{1,3}'
const reMonthText = '(' + reMonthFull + '|' + reMonthAbbr + '|' + reMonthroman + ')'
// Timezone forms: numeric corrections (e.g. GMT+05:30) and abbreviations
const reTzCorrection = '((?:GMT)?([+-])' + reHour24 + ':?' + reMinute + '?)'
const reTzAbbr = '\\(?([a-zA-Z]{1,6})\\)?'
// ISO day-of-year / week-of-year
const reDayOfYear = '(00[1-9]|0[1-9][0-9]|[12][0-9][0-9]|3[0-5][0-9]|36[0-6])'
const reWeekOfYear = '(0[1-9]|[1-4][0-9]|5[0-3])'
const reDateNoYear = reMonthText + '[ .\\t-]*' + reDay + '[,.stndrh\\t ]*'
// Convert a 12-hour clock hour to its 24-hour value using an a/p meridian
// marker; a missing/unknown marker leaves the hour untouched.
function processMeridian (hour, meridian) {
  const marker = meridian ? meridian.toLowerCase() : meridian
  if (marker === 'a') {
    // 12am is midnight (0); 1-11am are unchanged.
    return hour === 12 ? 0 : hour
  }
  if (marker === 'p') {
    // 12pm stays 12; 1-11pm shift into the afternoon.
    return hour === 12 ? hour : hour + 12
  }
  return hour
}
// Expand 1-2 digit year strings using PHP's pivot rule: 0-69 -> 2000s,
// 70-99 -> 1900s. Longer strings (or values >= 100) are taken literally.
function processYear (yearStr) {
  const parsed = Number(yearStr)
  if (yearStr.length >= 4 || parsed >= 100) {
    return parsed
  }
  return parsed + (parsed < 70 ? 2000 : 1900)
}
// Map English month names, common abbreviations, and roman numerals to
// zero-based month indexes. Unknown input yields undefined.
function lookupMonth (monthStr) {
  const months = {
    jan: 0, january: 0, i: 0,
    feb: 1, february: 1, ii: 1,
    mar: 2, march: 2, iii: 2,
    apr: 3, april: 3, iv: 3,
    may: 4, v: 4,
    jun: 5, june: 5, vi: 5,
    jul: 6, july: 6, vii: 6,
    aug: 7, august: 7, viii: 7,
    sep: 8, sept: 8, september: 8, ix: 8,
    oct: 9, october: 9, x: 9,
    nov: 10, november: 10, xi: 10,
    dec: 11, december: 11, xii: 11
  }
  return months[monthStr.toLowerCase()]
}
// Map day names/abbreviations to day numbers (Sunday = 0 .. Saturday = 6).
// Because Sunday's 0 is falsy, the `||` fallback lets callers remap Sunday
// (and any unrecognized string) to `desiredSundayNumber` — e.g. 7 for ISO.
function lookupWeekday (dayStr, desiredSundayNumber = 0) {
  const days = {
    sun: 0, sunday: 0,
    mon: 1, monday: 1,
    tue: 2, tuesday: 2,
    wed: 3, wednesday: 3,
    thu: 4, thursday: 4,
    fri: 5, friday: 5,
    sat: 6, saturday: 6
  }
  // Intentionally `||`, not `??`: see the Sunday remapping note above.
  return days[dayStr.toLowerCase()] || desiredSundayNumber
}
// Translates a relative quantifier word into { amount, behavior }.
// amount: signed multiplier (-1 for "last", 1 for "next", 2 for "second", ...);
//   undefined for unrecognized words.
// behavior: 1 only for "this" (anchor to the current unit), otherwise 0.
function lookupRelative (relText) {
  const key = relText.toLowerCase()
  const amounts = {
    last: -1,
    previous: -1,
    this: 0,
    first: 1,
    next: 1,
    second: 2,
    third: 3,
    fourth: 4,
    fifth: 5,
    sixth: 6,
    seventh: 7,
    eight: 8,
    eighth: 8,
    ninth: 9,
    tenth: 10,
    eleventh: 11,
    twelfth: 12
  }
  return {
    amount: amounts[key],
    behavior: key === 'this' ? 1 : 0
  }
}
// Parses a timezone correction such as "GMT+02:00", "+0200" or "-05:30" and
// returns the offset in seconds east of UTC. Falls back to oldValue when the
// input is empty or unparseable.
function processTzCorrection (tzOffset, oldValue) {
  const loosePattern = /(?:GMT)?([+-])(\d+)(:?)(\d{0,2})/i
  const parts = tzOffset ? tzOffset.match(loosePattern) : null
  if (!parts) {
    return oldValue
  }
  const sign = parts[1] === '-' ? -1 : 1
  let hours = +parts[2]
  let minutes = +parts[4]
  if (!parts[3] && !parts[4]) {
    // Compact "+hhmm" form: the digit run encodes hours*100 + minutes.
    minutes = Math.floor(hours % 100)
    hours = Math.floor(hours / 100)
  }
  // Offset expressed in seconds.
  return sign * (hours * 60 + minutes) * 60
}
// tz abbreviation : tz offset in seconds east of UTC.
// Lookup table for the tzAbbr rule; keys are lowercase. Note that some
// abbreviations are ambiguous in the real world (e.g. ist, cst) — this table
// pins one meaning each, mirroring PHP's strtotime() behavior.
const tzAbbrOffsets = {
  acdt: 37800,
  acst: 34200,
  addt: -7200,
  adt: -10800,
  aedt: 39600,
  aest: 36000,
  ahdt: -32400,
  ahst: -36000,
  akdt: -28800,
  akst: -32400,
  amt: -13840,
  apt: -10800,
  ast: -14400,
  awdt: 32400,
  awst: 28800,
  awt: -10800,
  bdst: 7200,
  bdt: -36000,
  bmt: -14309,
  bst: 3600,
  cast: 34200,
  cat: 7200,
  cddt: -14400,
  cdt: -18000,
  cemt: 10800,
  cest: 7200,
  cet: 3600,
  cmt: -15408,
  cpt: -18000,
  cst: -21600,
  cwt: -18000,
  chst: 36000,
  dmt: -1521,
  eat: 10800,
  eddt: -10800,
  edt: -14400,
  eest: 10800,
  eet: 7200,
  emt: -26248,
  ept: -14400,
  est: -18000,
  ewt: -14400,
  ffmt: -14660,
  fmt: -4056,
  gdt: 39600,
  gmt: 0,
  gst: 36000,
  hdt: -34200,
  hkst: 32400,
  hkt: 28800,
  hmt: -19776,
  hpt: -34200,
  hst: -36000,
  hwt: -34200,
  iddt: 14400,
  idt: 10800,
  imt: 25025,
  ist: 7200,
  jdt: 36000,
  jmt: 8440,
  jst: 32400,
  kdt: 36000,
  kmt: 5736,
  kst: 30600,
  lst: 9394,
  mddt: -18000,
  mdst: 16279,
  mdt: -21600,
  mest: 7200,
  met: 3600,
  mmt: 9017,
  mpt: -21600,
  msd: 14400,
  msk: 10800,
  mst: -25200,
  mwt: -21600,
  nddt: -5400,
  ndt: -9052,
  npt: -9000,
  nst: -12600,
  nwt: -9000,
  nzdt: 46800,
  nzmt: 41400,
  nzst: 43200,
  pddt: -21600,
  pdt: -25200,
  pkst: 21600,
  pkt: 18000,
  plmt: 25590,
  pmt: -13236,
  ppmt: -17340,
  ppt: -25200,
  pst: -28800,
  pwt: -25200,
  qmt: -18840,
  rmt: 5794,
  sast: 7200,
  sdmt: -16800,
  sjmt: -20173,
  smt: -13884,
  sst: -39600,
  tbmt: 10751,
  tmt: 12344,
  uct: 0,
  utc: 0,
  wast: 7200,
  wat: 3600,
  wemt: 7200,
  west: 3600,
  wet: 0,
  wib: 25200,
  wita: 28800,
  wit: 32400,
  wmt: 5040,
  yddt: -25200,
  ydt: -28800,
  ypt: -28800,
  yst: -32400,
  ywt: -28800,
  // Single-letter military time zones: a..m east of UTC (j unused),
  // n..y west of UTC, z = Zulu (UTC).
  a: 3600,
  b: 7200,
  c: 10800,
  d: 14400,
  e: 18000,
  f: 21600,
  g: 25200,
  h: 28800,
  i: 32400,
  k: 36000,
  l: 39600,
  m: 43200,
  n: -3600,
  o: -7200,
  p: -10800,
  q: -14400,
  r: -18000,
  s: -21600,
  t: -25200,
  u: -28800,
  v: -32400,
  w: -36000,
  x: -39600,
  y: -43200,
  z: 0
}
// Table of every date/time format the parser understands. Each entry has a
// `regex` anchored at the start of the remaining input, a `name` (mirroring
// PHP's internal rule names), and an optional `callback` invoked with the full
// match plus capture groups, bound to the mutable result object. A callback
// returning false aborts the parse (see the rule loop in strtotime()).
const formats = {
  yesterday: {
    regex: /^yesterday/i,
    name: 'yesterday',
    callback () {
      this.rd -= 1
      return this.resetTime()
    }
  },
  now: {
    regex: /^now/i,
    name: 'now'
    // do nothing
  },
  noon: {
    regex: /^noon/i,
    name: 'noon',
    callback () {
      return this.resetTime() && this.time(12, 0, 0, 0)
    }
  },
  midnightOrToday: {
    regex: /^(midnight|today)/i,
    name: 'midnight | today',
    callback () {
      return this.resetTime()
    }
  },
  tomorrow: {
    regex: /^tomorrow/i,
    name: 'tomorrow',
    callback () {
      this.rd += 1
      return this.resetTime()
    }
  },
  timestamp: {
    regex: /^@(-?\d+)/i,
    name: 'timestamp',
    // Absolute Unix timestamp: anchor at the epoch (UTC) and apply the
    // seconds as a relative shift.
    callback (match, timestamp) {
      this.rs += +timestamp
      this.y = 1970
      this.m = 0
      this.d = 1
      this.dates = 0
      return this.resetTime() && this.zone(0)
    }
  },
  firstOrLastDay: {
    regex: /^(first|last) day of/i,
    name: 'firstdayof | lastdayof',
    callback (match, day) {
      if (day.toLowerCase() === 'first') {
        this.firstOrLastDayOfMonth = 1
      } else {
        this.firstOrLastDayOfMonth = -1
      }
    }
  },
  backOrFrontOf: {
    regex: RegExp('^(back|front) of ' + reHour24 + reSpaceOpt + reMeridian + '?', 'i'),
    name: 'backof | frontof',
    // "back of 7" = 7:15, "front of 7" = 6:45.
    callback (match, side, hours, meridian) {
      const back = side.toLowerCase() === 'back'
      let hour = +hours
      let minute = 15
      if (!back) {
        hour -= 1
        minute = 45
      }
      hour = processMeridian(hour, meridian)
      return this.resetTime() && this.time(hour, minute, 0, 0)
    }
  },
  weekdayOf: {
    regex: RegExp('^(' + reReltextnumber + '|' + reReltexttext + ')' + reSpace + '(' + reDayfull + '|' + reDayabbr + ')' + reSpace + 'of', 'i'),
    name: 'weekdayof'
    // todo
  },
  mssqltime: {
    regex: RegExp('^' + reHour12 + ':' + reMinutelz + ':' + reSecondlz + '[:.]([0-9]+)' + reMeridian, 'i'),
    name: 'mssqltime',
    callback (match, hour, minute, second, frac, meridian) {
      return this.time(processMeridian(+hour, meridian), +minute, +second, +frac.substr(0, 3))
    }
  },
  timeLong12: {
    regex: RegExp('^' + reHour12 + '[:.]' + reMinute + '[:.]' + reSecondlz + reSpaceOpt + reMeridian, 'i'),
    name: 'timelong12',
    callback (match, hour, minute, second, meridian) {
      return this.time(processMeridian(+hour, meridian), +minute, +second, 0)
    }
  },
  timeShort12: {
    regex: RegExp('^' + reHour12 + '[:.]' + reMinutelz + reSpaceOpt + reMeridian, 'i'),
    name: 'timeshort12',
    callback (match, hour, minute, meridian) {
      return this.time(processMeridian(+hour, meridian), +minute, 0, 0)
    }
  },
  timeTiny12: {
    regex: RegExp('^' + reHour12 + reSpaceOpt + reMeridian, 'i'),
    name: 'timetiny12',
    callback (match, hour, meridian) {
      return this.time(processMeridian(+hour, meridian), 0, 0, 0)
    }
  },
  soap: {
    regex: RegExp('^' + reYear4 + '-' + reMonthlz + '-' + reDaylz + 'T' + reHour24lz + ':' + reMinutelz + ':' + reSecondlz + reFrac + reTzCorrection + '?', 'i'),
    name: 'soap',
    callback (match, year, month, day, hour, minute, second, frac, tzCorrection) {
      return this.ymd(+year, month - 1, +day) &&
        this.time(+hour, +minute, +second, +frac.substr(0, 3)) &&
        this.zone(processTzCorrection(tzCorrection))
    }
  },
  wddx: {
    regex: RegExp('^' + reYear4 + '-' + reMonth + '-' + reDay + 'T' + reHour24 + ':' + reMinute + ':' + reSecond),
    name: 'wddx',
    callback (match, year, month, day, hour, minute, second) {
      return this.ymd(+year, month - 1, +day) && this.time(+hour, +minute, +second, 0)
    }
  },
  exif: {
    regex: RegExp('^' + reYear4 + ':' + reMonthlz + ':' + reDaylz + ' ' + reHour24lz + ':' + reMinutelz + ':' + reSecondlz, 'i'),
    name: 'exif',
    callback (match, year, month, day, hour, minute, second) {
      return this.ymd(+year, month - 1, +day) && this.time(+hour, +minute, +second, 0)
    }
  },
  xmlRpc: {
    regex: RegExp('^' + reYear4 + reMonthlz + reDaylz + 'T' + reHour24 + ':' + reMinutelz + ':' + reSecondlz),
    name: 'xmlrpc',
    callback (match, year, month, day, hour, minute, second) {
      return this.ymd(+year, month - 1, +day) && this.time(+hour, +minute, +second, 0)
    }
  },
  xmlRpcNoColon: {
    regex: RegExp('^' + reYear4 + reMonthlz + reDaylz + '[Tt]' + reHour24 + reMinutelz + reSecondlz),
    name: 'xmlrpcnocolon',
    callback (match, year, month, day, hour, minute, second) {
      return this.ymd(+year, month - 1, +day) && this.time(+hour, +minute, +second, 0)
    }
  },
  clf: {
    // Common Log Format, e.g. "10/Oct/2000:13:55:36 -0700".
    regex: RegExp('^' + reDay + '/(' + reMonthAbbr + ')/' + reYear4 + ':' + reHour24lz + ':' + reMinutelz + ':' + reSecondlz + reSpace + reTzCorrection, 'i'),
    name: 'clf',
    callback (match, day, month, year, hour, minute, second, tzCorrection) {
      return this.ymd(+year, lookupMonth(month), +day) &&
        this.time(+hour, +minute, +second, 0) &&
        this.zone(processTzCorrection(tzCorrection))
    }
  },
  iso8601long: {
    regex: RegExp('^t?' + reHour24 + '[:.]' + reMinute + '[:.]' + reSecond + reFrac, 'i'),
    name: 'iso8601long',
    callback (match, hour, minute, second, frac) {
      return this.time(+hour, +minute, +second, +frac.substr(0, 3))
    }
  },
  dateTextual: {
    regex: RegExp('^' + reMonthText + '[ .\\t-]*' + reDay + '[,.stndrh\\t ]+' + reYear, 'i'),
    name: 'datetextual',
    callback (match, month, day, year) {
      return this.ymd(processYear(year), lookupMonth(month), +day)
    }
  },
  pointedDate4: {
    regex: RegExp('^' + reDay + '[.\\t-]' + reMonth + '[.-]' + reYear4),
    name: 'pointeddate4',
    callback (match, day, month, year) {
      return this.ymd(+year, month - 1, +day)
    }
  },
  pointedDate2: {
    regex: RegExp('^' + reDay + '[.\\t]' + reMonth + '\\.' + reYear2),
    name: 'pointeddate2',
    callback (match, day, month, year) {
      return this.ymd(processYear(year), month - 1, +day)
    }
  },
  timeLong24: {
    regex: RegExp('^t?' + reHour24 + '[:.]' + reMinute + '[:.]' + reSecond),
    name: 'timelong24',
    callback (match, hour, minute, second) {
      return this.time(+hour, +minute, +second, 0)
    }
  },
  dateNoColon: {
    regex: RegExp('^' + reYear4 + reMonthlz + reDaylz),
    name: 'datenocolon',
    callback (match, year, month, day) {
      return this.ymd(+year, month - 1, +day)
    }
  },
  pgydotd: {
    // PostgreSQL year.day-of-year, e.g. "2009.100".
    regex: RegExp('^' + reYear4 + '\\.?' + reDayOfYear),
    name: 'pgydotd',
    callback (match, year, day) {
      return this.ymd(+year, 0, +day)
    }
  },
  timeShort24: {
    regex: RegExp('^t?' + reHour24 + '[:.]' + reMinute, 'i'),
    name: 'timeshort24',
    callback (match, hour, minute) {
      return this.time(+hour, +minute, 0, 0)
    }
  },
  iso8601noColon: {
    regex: RegExp('^t?' + reHour24lz + reMinutelz + reSecondlz, 'i'),
    name: 'iso8601nocolon',
    callback (match, hour, minute, second) {
      return this.time(+hour, +minute, +second, 0)
    }
  },
  iso8601dateSlash: {
    // eventhough the trailing slash is optional in PHP
    // here it's mandatory and inputs without the slash
    // are handled by dateslash
    regex: RegExp('^' + reYear4 + '/' + reMonthlz + '/' + reDaylz + '/'),
    name: 'iso8601dateslash',
    callback (match, year, month, day) {
      return this.ymd(+year, month - 1, +day)
    }
  },
  dateSlash: {
    regex: RegExp('^' + reYear4 + '/' + reMonth + '/' + reDay),
    name: 'dateslash',
    callback (match, year, month, day) {
      return this.ymd(+year, month - 1, +day)
    }
  },
  american: {
    regex: RegExp('^' + reMonth + '/' + reDay + '/' + reYear),
    name: 'american',
    callback (match, month, day, year) {
      return this.ymd(processYear(year), month - 1, +day)
    }
  },
  americanShort: {
    regex: RegExp('^' + reMonth + '/' + reDay),
    name: 'americanshort',
    callback (match, month, day) {
      return this.ymd(this.y, month - 1, +day)
    }
  },
  gnuDateShortOrIso8601date2: {
    // iso8601date2 is complete subset of gnudateshort
    regex: RegExp('^' + reYear + '-' + reMonth + '-' + reDay),
    name: 'gnudateshort | iso8601date2',
    callback (match, year, month, day) {
      return this.ymd(processYear(year), month - 1, +day)
    }
  },
  iso8601date4: {
    regex: RegExp('^' + reYear4withSign + '-' + reMonthlz + '-' + reDaylz),
    name: 'iso8601date4',
    callback (match, year, month, day) {
      return this.ymd(+year, month - 1, +day)
    }
  },
  gnuNoColon: {
    regex: RegExp('^t?' + reHour24lz + reMinutelz, 'i'),
    name: 'gnunocolon',
    callback (match, hour, minute) {
      // this rule is a special case
      // if time was already set once by any preceding rule, it sets the captured value as year
      switch (this.times) {
        case 0:
          return this.time(+hour, +minute, 0, this.f)
        case 1:
          this.y = hour * 100 + +minute
          this.times++
          return true
        default:
          return false
      }
    }
  },
  gnuDateShorter: {
    regex: RegExp('^' + reYear4 + '-' + reMonth),
    name: 'gnudateshorter',
    callback (match, year, month) {
      return this.ymd(+year, month - 1, 1)
    }
  },
  pgTextReverse: {
    // note: allowed years are from 32-9999
    // years below 32 should be treated as days in datefull
    regex: RegExp('^' + '(\\d{3,4}|[4-9]\\d|3[2-9])-(' + reMonthAbbr + ')-' + reDaylz, 'i'),
    name: 'pgtextreverse',
    callback (match, year, month, day) {
      return this.ymd(processYear(year), lookupMonth(month), +day)
    }
  },
  dateFull: {
    regex: RegExp('^' + reDay + '[ \\t.-]*' + reMonthText + '[ \\t.-]*' + reYear, 'i'),
    name: 'datefull',
    callback (match, day, month, year) {
      return this.ymd(processYear(year), lookupMonth(month), +day)
    }
  },
  dateNoDay: {
    regex: RegExp('^' + reMonthText + '[ .\\t-]*' + reYear4, 'i'),
    name: 'datenoday',
    callback (match, month, year) {
      return this.ymd(+year, lookupMonth(month), 1)
    }
  },
  dateNoDayRev: {
    regex: RegExp('^' + reYear4 + '[ .\\t-]*' + reMonthText, 'i'),
    name: 'datenodayrev',
    callback (match, year, month) {
      return this.ymd(+year, lookupMonth(month), 1)
    }
  },
  pgTextShort: {
    regex: RegExp('^(' + reMonthAbbr + ')-' + reDaylz + '-' + reYear, 'i'),
    name: 'pgtextshort',
    callback (match, month, day, year) {
      return this.ymd(processYear(year), lookupMonth(month), +day)
    }
  },
  dateNoYear: {
    regex: RegExp('^' + reDateNoYear, 'i'),
    name: 'datenoyear',
    callback (match, month, day) {
      return this.ymd(this.y, lookupMonth(month), +day)
    }
  },
  dateNoYearRev: {
    regex: RegExp('^' + reDay + '[ .\\t-]*' + reMonthText, 'i'),
    name: 'datenoyearrev',
    callback (match, day, month) {
      return this.ymd(this.y, lookupMonth(month), +day)
    }
  },
  isoWeekDay: {
    regex: RegExp('^' + reYear4 + '-?W' + reWeekOfYear + '(?:-?([0-7]))?'),
    name: 'isoweekday | isoweek',
    callback (match, year, week, day) {
      day = day ? +day : 1
      if (!this.ymd(+year, 0, 1)) {
        return false
      }
      // get day of week for Jan 1st
      let dayOfWeek = new Date(this.y, this.m, this.d).getDay()
      // and use the day to figure out the offset for day 1 of week 1
      dayOfWeek = 0 - (dayOfWeek > 4 ? dayOfWeek - 7 : dayOfWeek)
      this.rd += dayOfWeek + ((week - 1) * 7) + day
    }
  },
  relativeText: {
    regex: RegExp('^(' + reReltextnumber + '|' + reReltexttext + ')' + reSpace + '(' + reReltextunit + ')', 'i'),
    name: 'relativetext',
    callback (match, relValue, relUnit) {
      // todo: implement handling of 'this time-unit'
      // eslint-disable-next-line no-unused-vars
      const { amount, behavior } = lookupRelative(relValue)
      switch (relUnit.toLowerCase()) {
        case 'sec':
        case 'secs':
        case 'second':
        case 'seconds':
          this.rs += amount
          break
        case 'min':
        case 'mins':
        case 'minute':
        case 'minutes':
          this.ri += amount
          break
        case 'hour':
        case 'hours':
          this.rh += amount
          break
        case 'day':
        case 'days':
          this.rd += amount
          break
        case 'fortnight':
        case 'fortnights':
        case 'forthnight':
        case 'forthnights':
          this.rd += amount * 14
          break
        case 'week':
        case 'weeks':
          this.rd += amount * 7
          break
        case 'month':
        case 'months':
          this.rm += amount
          break
        case 'year':
        case 'years':
          this.ry += amount
          break
        case 'mon': case 'monday':
        case 'tue': case 'tuesday':
        case 'wed': case 'wednesday':
        case 'thu': case 'thursday':
        case 'fri': case 'friday':
        case 'sat': case 'saturday':
        case 'sun': case 'sunday':
          this.resetTime()
          this.weekday = lookupWeekday(relUnit, 7)
          this.weekdayBehavior = 1
          this.rd += (amount > 0 ? amount - 1 : amount) * 7
          break
        case 'weekday':
        case 'weekdays':
          // todo
          break
      }
    }
  },
  relative: {
    // Numeric relative, e.g. "+3 days" or "-2 weeks"; multiple '-' signs multiply.
    regex: RegExp('^([+-]*)[ \\t]*(\\d+)' + reSpaceOpt + '(' + reReltextunit + '|week)', 'i'),
    name: 'relative',
    callback (match, signs, relValue, relUnit) {
      const minuses = signs.replace(/[^-]/g, '').length
      const amount = +relValue * Math.pow(-1, minuses)
      switch (relUnit.toLowerCase()) {
        case 'sec':
        case 'secs':
        case 'second':
        case 'seconds':
          this.rs += amount
          break
        case 'min':
        case 'mins':
        case 'minute':
        case 'minutes':
          this.ri += amount
          break
        case 'hour':
        case 'hours':
          this.rh += amount
          break
        case 'day':
        case 'days':
          this.rd += amount
          break
        case 'fortnight':
        case 'fortnights':
        case 'forthnight':
        case 'forthnights':
          this.rd += amount * 14
          break
        case 'week':
        case 'weeks':
          this.rd += amount * 7
          break
        case 'month':
        case 'months':
          this.rm += amount
          break
        case 'year':
        case 'years':
          this.ry += amount
          break
        case 'mon': case 'monday':
        case 'tue': case 'tuesday':
        case 'wed': case 'wednesday':
        case 'thu': case 'thursday':
        case 'fri': case 'friday':
        case 'sat': case 'saturday':
        case 'sun': case 'sunday':
          this.resetTime()
          this.weekday = lookupWeekday(relUnit, 7)
          this.weekdayBehavior = 1
          this.rd += (amount > 0 ? amount - 1 : amount) * 7
          break
        case 'weekday':
        case 'weekdays':
          // todo
          break
      }
    }
  },
  dayText: {
    regex: RegExp('^(' + reDaytext + ')', 'i'),
    name: 'daytext',
    callback (match, dayText) {
      this.resetTime()
      this.weekday = lookupWeekday(dayText, 0)
      if (this.weekdayBehavior !== 2) {
        this.weekdayBehavior = 1
      }
    }
  },
  relativeTextWeek: {
    regex: RegExp('^(' + reReltexttext + ')' + reSpace + 'week', 'i'),
    name: 'relativetextweek',
    callback (match, relText) {
      this.weekdayBehavior = 2
      switch (relText.toLowerCase()) {
        case 'this':
          this.rd += 0
          break
        case 'next':
          this.rd += 7
          break
        case 'last':
        case 'previous':
          this.rd -= 7
          break
      }
      if (isNaN(this.weekday)) {
        this.weekday = 1
      }
    }
  },
  monthFullOrMonthAbbr: {
    regex: RegExp('^(' + reMonthFull + '|' + reMonthAbbr + ')', 'i'),
    name: 'monthfull | monthabbr',
    callback (match, month) {
      return this.ymd(this.y, lookupMonth(month), this.d)
    }
  },
  tzCorrection: {
    regex: RegExp('^' + reTzCorrection, 'i'),
    name: 'tzcorrection',
    callback (tzCorrection) {
      return this.zone(processTzCorrection(tzCorrection))
    }
  },
  tzAbbr: {
    regex: RegExp('^' + reTzAbbr),
    name: 'tzabbr',
    callback (match, abbr) {
      const offset = tzAbbrOffsets[abbr.toLowerCase()]
      // isNaN(undefined) is true, so unknown abbreviations reject the match.
      if (isNaN(offset)) {
        return false
      }
      return this.zone(offset)
    }
  },
  ago: {
    // "ago" negates every relative shift accumulated so far.
    regex: /^ago/i,
    name: 'ago',
    callback () {
      this.ry = -this.ry
      this.rm = -this.rm
      this.rd = -this.rd
      this.rh = -this.rh
      this.ri = -this.ri
      this.rs = -this.rs
      this.rf = -this.rf
    }
  },
  year4: {
    regex: RegExp('^' + reYear4),
    name: 'year4',
    callback (match, year) {
      this.y = +year
      return true
    }
  },
  whitespace: {
    regex: /^[ .,\t]+/,
    name: 'whitespace'
    // do nothing
  },
  dateShortWithTimeLong: {
    regex: RegExp('^' + reDateNoYear + 't?' + reHour24 + '[:.]' + reMinute + '[:.]' + reSecond, 'i'),
    name: 'dateshortwithtimelong',
    callback (match, month, day, hour, minute, second) {
      return this.ymd(this.y, lookupMonth(month), +day) && this.time(+hour, +minute, +second, 0)
    }
  },
  dateShortWithTimeLong12: {
    regex: RegExp('^' + reDateNoYear + reHour12 + '[:.]' + reMinute + '[:.]' + reSecondlz + reSpaceOpt + reMeridian, 'i'),
    name: 'dateshortwithtimelong12',
    callback (match, month, day, hour, minute, second, meridian) {
      return this.ymd(this.y, lookupMonth(month), +day) && this.time(processMeridian(+hour, meridian), +minute, +second, 0)
    }
  },
  dateShortWithTimeShort: {
    regex: RegExp('^' + reDateNoYear + 't?' + reHour24 + '[:.]' + reMinute, 'i'),
    name: 'dateshortwithtimeshort',
    callback (match, month, day, hour, minute) {
      return this.ymd(this.y, lookupMonth(month), +day) && this.time(+hour, +minute, 0, 0)
    }
  },
  dateShortWithTimeShort12: {
    regex: RegExp('^' + reDateNoYear + reHour12 + '[:.]' + reMinutelz + reSpaceOpt + reMeridian, 'i'),
    name: 'dateshortwithtimeshort12',
    callback (match, month, day, hour, minute, meridian) {
      return this.ymd(this.y, lookupMonth(month), +day) && this.time(processMeridian(+hour, meridian), +minute, 0, 0)
    }
  }
}
// Prototype for the mutable parse state; strtotime() creates one instance per
// call via Object.create. Rule callbacks fill in absolute fields (y/m/d,
// h/i/s/f), relative shifts (r*), weekday handling, and timezone, then
// toDate() resolves everything against a reference Date.
const resultProto = {
  // date
  y: NaN,
  m: NaN,
  d: NaN,
  // time
  h: NaN,
  i: NaN,
  s: NaN,
  f: NaN,
  // relative shifts
  ry: 0,
  rm: 0,
  rd: 0,
  rh: 0,
  ri: 0,
  rs: 0,
  rf: 0,
  // weekday related shifts
  weekday: NaN,
  weekdayBehavior: 0,
  // first or last day of month
  // 0 none, 1 first, -1 last
  firstOrLastDayOfMonth: 0,
  // timezone correction in minutes
  z: NaN,
  // counters
  dates: 0,
  times: 0,
  zones: 0,
  // helper functions
  // Sets the absolute date; only the first date-bearing rule may succeed.
  ymd (y, m, d) {
    if (this.dates > 0) {
      return false
    }
    this.dates++
    this.y = y
    this.m = m
    this.d = d
    return true
  },
  // Sets the absolute time; only the first time-bearing rule may succeed.
  time (h, i, s, f) {
    if (this.times > 0) {
      return false
    }
    this.times++
    this.h = h
    this.i = i
    this.s = s
    this.f = f
    return true
  },
  // Zeroes the time and re-allows one time() call (used by "noon", "today", ...).
  resetTime () {
    this.h = 0
    this.i = 0
    this.s = 0
    this.f = 0
    this.times = 0
    return true
  },
  // Records the timezone offset. NOTE: `zones <= 1` allows a second call to
  // overwrite the first; third and later calls fail (PHP parity).
  zone (minutes) {
    if (this.zones <= 1) {
      this.zones++
      this.z = minutes
      return true
    }
    return false
  },
  // Resolves the accumulated state into a concrete Date, using `relativeTo`
  // to fill any fields no rule has set.
  toDate (relativeTo) {
    // A date without a time means midnight, not "now's time".
    if (this.dates && !this.times) {
      this.h = this.i = this.s = this.f = 0
    }
    // fill holes
    if (isNaN(this.y)) {
      this.y = relativeTo.getFullYear()
    }
    if (isNaN(this.m)) {
      this.m = relativeTo.getMonth()
    }
    if (isNaN(this.d)) {
      this.d = relativeTo.getDate()
    }
    if (isNaN(this.h)) {
      this.h = relativeTo.getHours()
    }
    if (isNaN(this.i)) {
      this.i = relativeTo.getMinutes()
    }
    if (isNaN(this.s)) {
      this.s = relativeTo.getSeconds()
    }
    if (isNaN(this.f)) {
      this.f = relativeTo.getMilliseconds()
    }
    // adjust special early
    switch (this.firstOrLastDayOfMonth) {
      case 1:
        this.d = 1
        break
      case -1:
        // day 0 of next month === last day of this month
        this.d = 0
        this.m += 1
        break
    }
    if (!isNaN(this.weekday)) {
      const date = new Date(relativeTo.getTime())
      date.setFullYear(this.y, this.m, this.d)
      date.setHours(this.h, this.i, this.s, this.f)
      const dow = date.getDay()
      if (this.weekdayBehavior === 2) {
        // To make "this week" work, where the current day of week is a "sunday"
        if (dow === 0 && this.weekday !== 0) {
          this.weekday = -6
        }
        // To make "sunday this week" work, where the current day of week is not a "sunday"
        if (this.weekday === 0 && dow !== 0) {
          this.weekday = 7
        }
        this.d -= dow
        this.d += this.weekday
      } else {
        let diff = this.weekday - dow
        // some PHP magic
        if ((this.rd < 0 && diff < 0) || (this.rd >= 0 && diff <= -this.weekdayBehavior)) {
          diff += 7
        }
        if (this.weekday >= 0) {
          this.d += diff
        } else {
          this.d -= (7 - (Math.abs(this.weekday) - dow))
        }
        this.weekday = NaN
      }
    }
    // adjust relative
    this.y += this.ry
    this.m += this.rm
    this.d += this.rd
    this.h += this.rh
    this.i += this.ri
    this.s += this.rs
    this.f += this.rf
    this.ry = this.rm = this.rd = 0
    this.rh = this.ri = this.rs = this.rf = 0
    const result = new Date(relativeTo.getTime())
    // since Date constructor treats years <= 99 as 1900+
    // it can't be used, thus this weird way
    result.setFullYear(this.y, this.m, this.d)
    result.setHours(this.h, this.i, this.s, this.f)
    // note: this is done twice in PHP
    // early when processing special relatives
    // and late
    // todo: check if the logic can be reduced
    // to just one time action
    switch (this.firstOrLastDayOfMonth) {
      case 1:
        result.setDate(1)
        break
      case -1:
        result.setMonth(result.getMonth() + 1, 0)
        break
    }
    // adjust timezone
    if (!isNaN(this.z) && result.getTimezoneOffset() !== this.z) {
      result.setUTCFullYear(
        result.getFullYear(),
        result.getMonth(),
        result.getDate())
      result.setUTCHours(
        result.getHours(),
        result.getMinutes(),
        result.getSeconds() - this.z,
        result.getMilliseconds())
    }
    return result
  }
}
module.exports = function strtotime (str, now) {
// discuss at: https://locutus.io/php/strtotime/
// original by: Caio Ariede (https://caioariede.com)
// improved by: Kevin van Zonneveld (https://kvz.io)
// improved by: Caio Ariede (https://caioariede.com)
// improved by: A. Matías Quezada (https://amatiasq.com)
// improved by: preuter
// improved by: Brett Zamir (https://brett-zamir.me)
// improved by: Mirko Faber
// input by: David
// bugfixed by: Wagner B. Soares
// bugfixed by: Artur Tchernychev
// bugfixed by: Stephan Bösch-Plepelits (https://github.com/plepe)
// reimplemented by: Rafał Kukawski
// note 1: Examples all have a fixed timestamp to prevent
// note 1: tests to fail because of variable time(zones)
// example 1: strtotime('+1 day', 1129633200)
// returns 1: 1129719600
// example 2: strtotime('+1 week 2 days 4 hours 2 seconds', 1129633200)
// returns 2: 1130425202
// example 3: strtotime('last month', 1129633200)
// returns 3: 1127041200
// example 4: strtotime('2009-05-04 08:30:00+00')
// returns 4: 1241425800
// example 5: strtotime('2009-05-04 08:30:00+02:00')
// returns 5: 1241418600
// example 6: strtotime('2009-05-04 08:30:00 YWT')
// returns 6: 1241454600
if (now == null) {
now = Math.floor(Date.now() / 1000)
}
// the rule order is important
// if multiple rules match, the longest match wins
// if multiple rules match the same string, the first match wins
const rules = [
formats.yesterday,
formats.now,
formats.noon,
formats.midnightOrToday,
formats.tomorrow,
formats.timestamp,
formats.firstOrLastDay,
formats.backOrFrontOf,
// formats.weekdayOf, // not yet implemented
formats.timeTiny12,
formats.timeShort12,
formats.timeLong12,
formats.mssqltime,
formats.timeShort24,
formats.timeLong24,
formats.iso8601long,
formats.gnuNoColon,
formats.iso8601noColon,
formats.americanShort,
formats.american,
formats.iso8601date4,
formats.iso8601dateSlash,
formats.dateSlash,
formats.gnuDateShortOrIso8601date2,
formats.gnuDateShorter,
formats.dateFull,
formats.pointedDate4,
formats.pointedDate2,
formats.dateNoDay,
formats.dateNoDayRev,
formats.dateTextual,
formats.dateNoYear,
formats.dateNoYearRev,
formats.dateNoColon,
formats.xmlRpc,
formats.xmlRpcNoColon,
formats.soap,
formats.wddx,
formats.exif,
formats.pgydotd,
formats.isoWeekDay,
formats.pgTextShort,
formats.pgTextReverse,
formats.clf,
formats.year4,
formats.ago,
formats.dayText,
formats.relativeTextWeek,
formats.relativeText,
formats.monthFullOrMonthAbbr,
formats.tzCorrection,
formats.tzAbbr,
formats.dateShortWithTimeShort12,
formats.dateShortWithTimeLong12,
formats.dateShortWithTimeShort,
formats.dateShortWithTimeLong,
formats.relative,
formats.whitespace
]
const result = Object.create(resultProto)
while (str.length) {
let longestMatch = null
let finalRule = null
for (let i = 0, l = rules.length; i < l; i++) {
const format = rules[i]
const match = str.match(format.regex)
if (match) {
if (!longestMatch || match[0].length > longestMatch[0].length) {
longestMatch = match
finalRule = format
}
}
}
if (!finalRule || (finalRule.callback && finalRule.callback.apply(result, longestMatch) === false)) {
return false
}
str = str.substr(longestMatch[0].length)
finalRule = null
longestMatch = null
}
return Math.floor(result.toDate(new Date(now * 1000)) / 1000)
} | the_stack |
import {
FocusMonitor,
FocusOrigin,
FocusTrap,
FocusTrapFactory,
InteractivityChecker,
} from '@angular/cdk/a11y';
import {OverlayRef} from '@angular/cdk/overlay';
import {_getFocusedElementPierceShadowDom} from '@angular/cdk/platform';
import {
BasePortalOutlet,
CdkPortalOutlet,
ComponentPortal,
DomPortal,
TemplatePortal,
} from '@angular/cdk/portal';
import {DOCUMENT} from '@angular/common';
import {
ChangeDetectionStrategy,
Component,
ComponentRef,
ElementRef,
EmbeddedViewRef,
Inject,
NgZone,
OnDestroy,
Optional,
ViewChild,
ViewEncapsulation,
} from '@angular/core';
import {DialogConfig} from './dialog-config';
/** Throws when a second piece of content is attached to an already-populated dialog. */
export function throwDialogContentAlreadyAttachedError() {
  const message = 'Attempting to attach dialog content after content is already attached';
  throw Error(message);
}
/**
 * Internal component that wraps user-provided dialog content.
 * @docs-private
 */
@Component({
  selector: 'cdk-dialog-container',
  templateUrl: './dialog-container.html',
  styleUrls: ['dialog-container.css'],
  encapsulation: ViewEncapsulation.None,
  // Using OnPush for dialogs caused some G3 sync issues. Disabled until we can track them down.
  // tslint:disable-next-line:validate-decorators
  changeDetection: ChangeDetectionStrategy.Default,
  host: {
    'class': 'cdk-dialog-container',
    'tabindex': '-1',
    // ARIA attributes are driven entirely by the injected DialogConfig.
    '[attr.id]': '_config.id || null',
    '[attr.role]': '_config.role',
    '[attr.aria-modal]': '_config.ariaModal',
    '[attr.aria-labelledby]': '_config.ariaLabel ? null : _ariaLabelledBy',
    '[attr.aria-label]': '_config.ariaLabel',
    '[attr.aria-describedby]': '_config.ariaDescribedBy || null',
  },
})
export class CdkDialogContainer<C extends DialogConfig = DialogConfig>
  extends BasePortalOutlet
  implements OnDestroy
{
  protected _document: Document;
  /** The portal outlet inside of this container into which the dialog content will be loaded. */
  @ViewChild(CdkPortalOutlet, {static: true}) _portalOutlet: CdkPortalOutlet;
  /** The class that traps and manages focus within the dialog. */
  private _focusTrap: FocusTrap;
  /** Element that was focused before the dialog was opened. Save this to restore upon close. */
  private _elementFocusedBeforeDialogWasOpened: HTMLElement | null = null;
  /**
   * Type of interaction that led to the dialog being closed. This is used to determine
   * whether the focus style will be applied when returning focus to its original location
   * after the dialog is closed.
   */
  _closeInteractionType: FocusOrigin | null = null;
  /** ID of the element that should be considered as the dialog's label. */
  _ariaLabelledBy: string | null;
  /** Captures the DI document reference and seeds the aria-labelledby id from the config. */
  constructor(
    protected _elementRef: ElementRef,
    protected _focusTrapFactory: FocusTrapFactory,
    @Optional() @Inject(DOCUMENT) _document: any,
    @Inject(DialogConfig) readonly _config: C,
    private _interactivityChecker: InteractivityChecker,
    private _ngZone: NgZone,
    private _overlayRef: OverlayRef,
    private _focusMonitor?: FocusMonitor,
  ) {
    super();
    this._ariaLabelledBy = this._config.ariaLabelledBy || null;
    this._document = _document;
  }
  /** Runs once content is attached: wires up focus trapping, backdrop handling, and initial focus. */
  protected _contentAttached() {
    this._initializeFocusTrap();
    this._handleBackdropClicks();
    this._captureInitialFocus();
  }
  /**
   * Can be used by child classes to customize the initial focus
   * capturing behavior (e.g. if it's tied to an animation).
   */
  protected _captureInitialFocus() {
    this._trapFocus();
  }
  /** Restores focus to the previously-focused element when the container is destroyed. */
  ngOnDestroy() {
    this._restoreFocus();
  }
  /**
   * Attach a ComponentPortal as content to this dialog container.
   * @param portal Portal to be attached as the dialog content.
   * @returns Reference to the created component instance.
   * @throws If content has already been attached (dev mode only).
   */
  attachComponentPortal<T>(portal: ComponentPortal<T>): ComponentRef<T> {
    if (this._portalOutlet.hasAttached() && (typeof ngDevMode === 'undefined' || ngDevMode)) {
      throwDialogContentAlreadyAttachedError();
    }
    const result = this._portalOutlet.attachComponentPortal(portal);
    this._contentAttached();
    return result;
  }
  /**
   * Attach a TemplatePortal as content to this dialog container.
   * @param portal Portal to be attached as the dialog content.
   * @returns Reference to the embedded view created from the template.
   * @throws If content has already been attached (dev mode only).
   */
  attachTemplatePortal<T>(portal: TemplatePortal<T>): EmbeddedViewRef<T> {
    if (this._portalOutlet.hasAttached() && (typeof ngDevMode === 'undefined' || ngDevMode)) {
      throwDialogContentAlreadyAttachedError();
    }
    const result = this._portalOutlet.attachTemplatePortal(portal);
    this._contentAttached();
    return result;
  }
  /**
   * Attaches a DOM portal to the dialog container.
   * @param portal Portal to be attached.
   * @deprecated To be turned into a method.
   * @breaking-change 10.0.0
   */
  override attachDomPortal = (portal: DomPortal) => {
    if (this._portalOutlet.hasAttached() && (typeof ngDevMode === 'undefined' || ngDevMode)) {
      throwDialogContentAlreadyAttachedError();
    }
    const result = this._portalOutlet.attachDomPortal(portal);
    this._contentAttached();
    return result;
  };
  // TODO(crisbeto): this shouldn't be exposed, but there are internal references to it.
  /** Captures focus if it isn't already inside the dialog. */
  _recaptureFocus() {
    if (!this._containsFocus()) {
      this._trapFocus();
    }
  }
  /**
   * Focuses the provided element. If the element is not focusable, it will add a tabIndex
   * attribute to forcefully focus it. The attribute is removed after focus is moved.
   * @param element The element to focus.
   * @param options Options passed along to the underlying `focus()` call.
   */
  private _forceFocus(element: HTMLElement, options?: FocusOptions) {
    if (!this._interactivityChecker.isFocusable(element)) {
      element.tabIndex = -1;
      // The tabindex attribute should be removed to avoid navigating to that element again
      // Listeners run outside Angular to avoid triggering change detection on blur/mousedown.
      this._ngZone.runOutsideAngular(() => {
        const callback = () => {
          element.removeEventListener('blur', callback);
          element.removeEventListener('mousedown', callback);
          element.removeAttribute('tabindex');
        };
        element.addEventListener('blur', callback);
        element.addEventListener('mousedown', callback);
      });
    }
    element.focus(options);
  }
/**
* Focuses the first element that matches the given selector within the focus trap.
* @param selector The CSS selector for the element to set focus to.
*/
private _focusByCssSelector(selector: string, options?: FocusOptions) {
let elementToFocus = this._elementRef.nativeElement.querySelector(
selector,
) as HTMLElement | null;
if (elementToFocus) {
this._forceFocus(elementToFocus, options);
}
}
  /**
   * Moves the focus inside the focus trap. When autoFocus is not set to 'dialog', if focus
   * cannot be moved then focus will go to the dialog container.
   */
  protected _trapFocus() {
    const element = this._elementRef.nativeElement;
    // If were to attempt to focus immediately, then the content of the dialog would not yet be
    // ready in instances where change detection has to run first. To deal with this, we simply
    // wait for the microtask queue to be empty when setting focus when autoFocus isn't set to
    // dialog. If the element inside the dialog can't be focused, then the container is focused
    // so the user can't tab into other elements behind it.
    switch (this._config.autoFocus) {
      case false:
      case 'dialog':
        // Ensure that focus is on the dialog container. It's possible that a different
        // component tried to move focus while the open animation was running. See:
        // https://github.com/angular/components/issues/16215. Note that we only want to do this
        // if the focus isn't inside the dialog already, because it's possible that the consumer
        // turned off `autoFocus` in order to move focus themselves.
        if (!this._containsFocus()) {
          element.focus();
        }
        break;
      case true:
      case 'first-tabbable':
        this._focusTrap.focusInitialElementWhenReady().then(focusedSuccessfully => {
          // If we weren't able to find a focusable element in the dialog, then focus the dialog
          // container instead.
          if (!focusedSuccessfully) {
            this._focusDialogContainer();
          }
        });
        break;
      case 'first-heading':
        this._focusByCssSelector('h1, h2, h3, h4, h5, h6, [role="heading"]');
        break;
      default:
        // Any other value is treated as a CSS selector supplied by the consumer.
        this._focusByCssSelector(this._config.autoFocus!);
        break;
    }
  }
/** Restores focus to the element that was focused before the dialog opened. */
private _restoreFocus() {
  const restoreConfig = this._config.restoreFocus;
  let target: HTMLElement | null = null;
  if (typeof restoreConfig === 'string') {
    target = this._document.querySelector(restoreConfig);
  } else if (typeof restoreConfig === 'boolean') {
    target = restoreConfig ? this._elementFocusedBeforeDialogWasOpened : null;
  } else if (restoreConfig) {
    target = restoreConfig;
  }
  // The extra `typeof` check is needed because IE can set `activeElement` to null
  // in some cases.
  if (this._config.restoreFocus && target && typeof target.focus === 'function') {
    const activeElement = _getFocusedElementPierceShadowDom();
    const element = this._elementRef.nativeElement;
    // Only move focus when it is still inside the dialog or on the body (usually
    // because a non-focusable element such as the backdrop was clicked). The consumer
    // may have moved focus themselves before the animation finished, in which case
    // nothing should be done here.
    const focusStillInDialog =
      !activeElement ||
      activeElement === this._document.body ||
      activeElement === element ||
      element.contains(activeElement);
    if (focusStillInDialog) {
      if (this._focusMonitor) {
        this._focusMonitor.focusVia(target, this._closeInteractionType);
        this._closeInteractionType = null;
      } else {
        target.focus();
      }
    }
  }
  if (this._focusTrap) {
    this._focusTrap.destroy();
  }
}
/** Focuses the dialog container. */
private _focusDialogContainer() {
  const nativeElement = this._elementRef.nativeElement;
  // There is no focus method when rendering on the server.
  if (nativeElement.focus) {
    nativeElement.focus();
  }
}
/** Returns whether focus is inside the dialog. */
private _containsFocus() {
  const host = this._elementRef.nativeElement;
  const active = _getFocusedElementPierceShadowDom();
  return host === active || host.contains(active);
}
/** Sets up the focus trap. */
private _initializeFocusTrap() {
  const host = this._elementRef.nativeElement;
  this._focusTrap = this._focusTrapFactory.create(host);
  if (this._document) {
    // Capture the currently-focused element; it is re-focused when the dialog closes.
    this._elementFocusedBeforeDialogWasOpened = _getFocusedElementPierceShadowDom();
  }
}
/** Sets up the listener that handles clicks on the dialog backdrop. */
private _handleBackdropClicks() {
  this._overlayRef.backdropClick().subscribe(() => {
    // A backdrop click moves focus out of the dialog; recapture it when
    // closing via the backdrop is disabled.
    if (this._config.disableClose) {
      this._recaptureFocus();
    }
  });
}
} | the_stack |
import { NextFunction, Request, Response } from "express"
import { File } from "formidable"
import { filterXSS } from "xss"
import { DashboardContents, TaskOptions } from "../components/dashboard"
import { getSubmissionTime } from "../components/util"
import { FormField } from "../const/project"
import { DatasetExport, ItemExport } from "../types/export"
import { Project } from "../types/project"
import {
createProject,
createTasks,
parseFiles,
parseSingleFile,
parseForm,
readConfig,
filterIntersectedPolygonsInProject
} from "./create_project"
import { convertStateToExport } from "./export"
import { FileStorage } from "./file_storage"
import Logger from "./logger"
import { getExportName } from "./path"
import { ProjectStore } from "./project_store"
import { S3Storage } from "./s3_storage"
import {
getDefaultTaskOptions,
getProjectOptions,
getProjectStats,
getTaskOptions
} from "./stats"
import { Storage } from "./storage"
import { UserManager } from "./user_manager"
import { parseProjectName } from "./util"
import { QueryArg } from "../const/common"
/**
* Wraps HTTP listeners
*/
export class Listeners {
/** the project store */
protected projectStore: ProjectStore
/** the user manager */
protected userManager: UserManager
/**
* Constructor
*
* @param projectStore
* @param userManager
*/
constructor(projectStore: ProjectStore, userManager: UserManager) {
this.projectStore = projectStore
this.userManager = userManager
}
/**
* Logs requests to static or dynamic files
*
* @param req
* @param _res
* @param next
*/
public loggingHandler(
req: Request,
_res: Response,
next: NextFunction
): void {
const log = `Requesting ${req.originalUrl}`
Logger.info(log)
next()
}
/**
* Handles getting all projects' names
*
* @param _req
* @param res
*/
public async projectNameHandler(_req: Request, res: Response): Promise<void> {
let projects: string[]
const defaultProjects = ["No existing project"]
try {
projects = await this.projectStore.getExistingProjects()
if (projects.length === 0) {
projects = defaultProjects
}
} catch (err) {
Logger.error(err)
projects = defaultProjects
}
const projectNames = JSON.stringify(projects)
res.send(projectNames)
res.end()
}
/**
* Handles posting export
*
* @param req
* @param res
*/
public async getExportHandler(req: Request, res: Response): Promise<void> {
if (this.checkInvalidGet(req, res)) {
return
}
try {
const projectName = req.query[FormField.PROJECT_NAME] as string
// Grab the latest submissions from all tasks
const tasks = await this.projectStore.getTasksInProject(projectName)
const dataset: DatasetExport = {
frames: [],
config: {
attributes: [],
categories: []
}
}
let items: ItemExport[] = []
// Load the latest submission for each task to export
for (const task of tasks) {
try {
const taskId = task.config.taskId
const state = await this.projectStore.loadState(projectName, taskId)
items = items.concat(convertStateToExport(state))
if (dataset.config.attributes?.length === 0) {
dataset.config.attributes = state.task.config.attributes
}
if (dataset.config.categories?.length === 0) {
dataset.config.categories = state.task.config.categories
}
} catch (error) {
Logger.info(error.message)
for (const itemToLoad of task.items) {
const url = Object.values(itemToLoad.urls)[0]
const timestamp = getSubmissionTime(task.progress.submissions)
items.push({
name: url,
url,
sensor: -1,
timestamp,
videoName: itemToLoad.videoName,
attributes: {},
labels: []
})
}
}
}
dataset.frames = items
const exportJson = JSON.stringify(dataset, null, " ")
// Set relevant header and send the exported json file
res.attachment(getExportName(projectName))
res.end(Buffer.from(exportJson, "binary"), "binary")
} catch (error) {
// TODO: Be more specific about what this error may be
Logger.error(error)
res.end()
}
}
/**
* Alert the user that the sent fields were illegal
*
* @param res
*/
public badFormResponse(res: Response): void {
const err = Error("Illegal fields for project creation")
Logger.error(err)
res.status(400).send(err.message)
}
/**
* Alert the user that the task creation request was illegal
*
* @param res
*/
public badTaskResponse(res: Response): void {
const err = Error("Illegal fields for task creation")
Logger.error(err)
res.status(400).send(err.message)
}
/**
* Error if it's not a post request
*
* @param req
* @param res
*/
public checkInvalidPost(req: Request, res: Response): boolean {
if (req.method !== "POST") {
res.sendStatus(404)
res.end()
return true
}
return false
}
/**
* Error if it's not a get request
* By default, also requires non-empty queryArg parameters
*
* @param req
* @param res
* @param requireParam
*/
public checkInvalidGet(
req: Request,
res: Response,
requireParam: boolean = true
): boolean {
if (req.method !== "GET" || (requireParam && req.query === {})) {
res.sendStatus(404)
res.end()
return true
}
return false
}
/**
* Handles posted project from internal data
* Items file not required, since items can be added later
*
* @param req
* @param res
*/
public async postProjectInternalHandler(
req: Request,
res: Response
): Promise<void> {
if (this.checkInvalidPost(req, res)) {
return
}
if (
req.body === undefined ||
req.body.fields === undefined ||
req.body.files === undefined
) {
return this.badFormResponse(res)
}
/**
* Use the region/bucket specified in the request
* to access the item/category/attribute files
*/
const s3Path = req.body.fields.s3_path as string
let storage: Storage
try {
storage = new S3Storage(s3Path)
} catch (err) {
Logger.error(err)
return this.badFormResponse(res)
}
storage.setExt("")
await this.createProjectFromDicts(
storage,
req.body.fields,
req.body.files,
false,
res
)
}
/**
* Handles posted project from form data
* Items file required
*
* @param req
* @param res
*/
public async postProjectHandler(req: Request, res: Response): Promise<void> {
if (this.checkInvalidPost(req, res)) {
return
}
if (req.fields === undefined || req.files === undefined) {
return this.badFormResponse(res)
}
const fields = req.fields as { [key: string]: string }
const formFiles = req.files as { [key: string]: File | undefined }
const files: { [key: string]: string } = {}
for (const key of Object.keys(formFiles)) {
const file = formFiles[key]
if (file !== undefined && file.size !== 0) {
files[key] = file.path
}
}
const storage = new FileStorage("")
storage.setExt("")
await this.createProjectFromDicts(storage, fields, files, true, res)
}
/**
* Handles tasks being added to a project
*
* @param req
* @param res
*/
public async postTasksHandler(req: Request, res: Response): Promise<void> {
if (this.checkInvalidPost(req, res)) {
return
}
if (
req.body === undefined ||
req.body.items === undefined ||
req.body.projectName === undefined
) {
this.badTaskResponse(res)
return
}
// TODO: This if clause aims to solve the lgtm alert.
// Could be removed in the future if better way found.
if (req.body.items !== "examples/image_list.yml") {
throw Error(`req.body.items should be "examples/image_list.yml" here.`)
}
// Read in the data
const storage = new FileStorage("")
storage.setExt("")
const items = await readConfig<Array<Partial<ItemExport>>>(
storage,
req.body.items,
[]
)
let project: Project
let projectName: string
try {
projectName = parseProjectName(req.body.projectName)
project = await this.projectStore.loadProject(projectName)
} catch (err) {
Logger.error(err)
this.badTaskResponse(res)
return
}
// Update the project with the new items
const itemStartNum = project.items.length
project.items = project.items.concat(items)
await this.projectStore.saveProject(project)
// Update the tasks, make sure not to combine old and new items
project.items = items
const oldTasks = await this.projectStore.getTasksInProject(projectName)
const taskStartNum = oldTasks.length
await createTasks(project, this.projectStore, taskStartNum, itemStartNum)
res.sendStatus(200)
}
/**
* Get the labeling stats
*
* @param req
* @param res
*/
public async statsHandler(req: Request, res: Response): Promise<void> {
if (this.checkInvalidGet(req, res)) {
return
}
try {
const projectName = req.query.name as string
const savedTasks = await this.projectStore.loadTaskStates(projectName)
const stats = getProjectStats(savedTasks)
res.send(JSON.stringify(stats))
} catch (err) {
Logger.error(err)
res.send(filterXSS(err.message))
}
}
/**
* Return dashboard info
*
* @param req
* @param res
*/
public async dashboardHandler(req: Request, res: Response): Promise<void> {
if (this.checkInvalidGet(req, res)) {
return
}
try {
const projectName = req.query.name as string
const project = await this.projectStore.loadProjectInfo(projectName)
const projectMetaData = getProjectOptions(project)
const taskKeys = await this.projectStore.getTaskKeysInProject(projectName)
let taskOptions: TaskOptions[] = []
taskKeys.forEach((_taskKey: string) => {
taskOptions = taskOptions.concat(getDefaultTaskOptions(project.config))
})
const numUsers = await this.userManager.countUsers(projectName)
const contents: DashboardContents = {
projectMetaData,
taskMetaDatas: taskOptions,
taskKeys,
numUsers
}
res.send(JSON.stringify(contents))
} catch (err) {
Logger.error(err)
res.send(filterXSS(err.message))
}
}
/**
* Delete a project
*
* @param req
* @param res
*/
public async deleteProjectHandler(
req: Request,
res: Response
): Promise<void> {
if (this.checkInvalidGet(req, res)) {
return
}
await this.projectStore.deleteProject(
req.query[FormField.PROJECT_NAME] as string
)
res.sendStatus(200)
}
/**
* Return task metadata
*
* @param req
* @param res
*/
public async taskMetaDataHandler(req: Request, res: Response): Promise<void> {
if (this.checkInvalidGet(req, res)) {
return
}
try {
const projectName = req.query[QueryArg.PROJECT_NAME] as string
const taskId = req.query[QueryArg.TASK_ID] as string
const savedState = await this.projectStore.loadState(projectName, taskId)
const savedTask = savedState.task
const taskOption = getTaskOptions(savedTask)
res.send(JSON.stringify(taskOption))
} catch (err) {
Logger.error(err)
res.send(filterXSS(err.message))
}
}
/**
* Finishes project creation using processed dicts
*
* @param storage
* @param itemsRequired
* @param res
*/
private async createProjectFromDicts(
storage: Storage,
fields: { [key: string]: string },
files: { [key: string]: string },
itemsRequired: boolean,
res: Response
): Promise<void> {
try {
// Parse form from request
const form = await parseForm(fields, this.projectStore)
// Parse item, category, and attribute data from the form
const formFileData =
Object.keys(files).length > 1
? await parseFiles(storage, form.labelType, files, itemsRequired)
: await parseSingleFile(storage, form.labelType, files)
// Create the project from the form data
const project = await createProject(form, formFileData)
const [filteredProject, msg] = filterIntersectedPolygonsInProject(project)
await Promise.all([
this.projectStore.saveProject(filteredProject),
// Create tasks then save them
createTasks(filteredProject, this.projectStore)
])
res.send(filterXSS(msg))
} catch (err) {
Logger.error(err)
// Alert the user that something failed
res.status(400).send(filterXSS(err.message))
}
}
} | the_stack |
import {Request} from '../lib/request';
import {Response} from '../lib/response';
import {AWSError} from '../lib/error';
import {Service} from '../lib/service';
import {ServiceConfigurationOptions} from '../lib/service';
import {ConfigBase as Config} from '../lib/config-base';
import {Readable} from 'stream';
interface Blob {}
declare class LookoutVision extends Service {
/**
 * Constructs a service object. This object has one method for each API operation.
 */
constructor(options?: LookoutVision.Types.ClientConfiguration)
config: Config & LookoutVision.Types.ClientConfiguration;
/**
 * Creates a new dataset in an Amazon Lookout for Vision project. CreateDataset can create a training or a test dataset from a valid dataset source (DatasetSource). If you want a single dataset project, specify train for the value of DatasetType. To have a project with separate training and test datasets, call CreateDataset twice. On the first call, specify train for the value of DatasetType. On the second call, specify test for the value of DatasetType.
 */
createDataset(params: LookoutVision.Types.CreateDatasetRequest, callback?: (err: AWSError, data: LookoutVision.Types.CreateDatasetResponse) => void): Request<LookoutVision.Types.CreateDatasetResponse, AWSError>;
/**
 * Creates a new dataset in an Amazon Lookout for Vision project. CreateDataset can create a training or a test dataset from a valid dataset source (DatasetSource). If you want a single dataset project, specify train for the value of DatasetType. To have a project with separate training and test datasets, call CreateDataset twice. On the first call, specify train for the value of DatasetType. On the second call, specify test for the value of DatasetType.
 */
createDataset(callback?: (err: AWSError, data: LookoutVision.Types.CreateDatasetResponse) => void): Request<LookoutVision.Types.CreateDatasetResponse, AWSError>;
/**
 * Creates a new version of a model within an Amazon Lookout for Vision project. CreateModel is an asynchronous operation in which Amazon Lookout for Vision trains, tests, and evaluates a new version of a model. To get the current status, check the Status field returned in the response from DescribeModel. If the project has a single dataset, Amazon Lookout for Vision internally splits the dataset to create a training and a test dataset. If the project has a training and a test dataset, Lookout for Vision uses the respective datasets to train and test the model. After training completes, the evaluation metrics are stored at the location specified in OutputConfig.
 */
createModel(params: LookoutVision.Types.CreateModelRequest, callback?: (err: AWSError, data: LookoutVision.Types.CreateModelResponse) => void): Request<LookoutVision.Types.CreateModelResponse, AWSError>;
/**
 * Creates a new version of a model within an Amazon Lookout for Vision project. CreateModel is an asynchronous operation in which Amazon Lookout for Vision trains, tests, and evaluates a new version of a model. To get the current status, check the Status field returned in the response from DescribeModel. If the project has a single dataset, Amazon Lookout for Vision internally splits the dataset to create a training and a test dataset. If the project has a training and a test dataset, Lookout for Vision uses the respective datasets to train and test the model. After training completes, the evaluation metrics are stored at the location specified in OutputConfig.
 */
createModel(callback?: (err: AWSError, data: LookoutVision.Types.CreateModelResponse) => void): Request<LookoutVision.Types.CreateModelResponse, AWSError>;
/**
 * Creates an empty Amazon Lookout for Vision project. After you create the project, add a dataset by calling CreateDataset.
 */
createProject(params: LookoutVision.Types.CreateProjectRequest, callback?: (err: AWSError, data: LookoutVision.Types.CreateProjectResponse) => void): Request<LookoutVision.Types.CreateProjectResponse, AWSError>;
/**
 * Creates an empty Amazon Lookout for Vision project. After you create the project, add a dataset by calling CreateDataset.
 */
createProject(callback?: (err: AWSError, data: LookoutVision.Types.CreateProjectResponse) => void): Request<LookoutVision.Types.CreateProjectResponse, AWSError>;
/**
 * Deletes an existing Amazon Lookout for Vision dataset. If your project has a single dataset, you must create a new dataset before you can create a model. If your project has a training dataset and a test dataset consider the following. If you delete the test dataset, your project reverts to a single dataset project. If you then train the model, Amazon Lookout for Vision internally splits the remaining dataset into a training and test dataset. If you delete the training dataset, you must create a training dataset before you can create a model. It might take a while to delete the dataset. To check the current status, check the Status field in the response from a call to DescribeDataset.
 */
deleteDataset(params: LookoutVision.Types.DeleteDatasetRequest, callback?: (err: AWSError, data: LookoutVision.Types.DeleteDatasetResponse) => void): Request<LookoutVision.Types.DeleteDatasetResponse, AWSError>;
/**
 * Deletes an existing Amazon Lookout for Vision dataset. If your project has a single dataset, you must create a new dataset before you can create a model. If your project has a training dataset and a test dataset consider the following. If you delete the test dataset, your project reverts to a single dataset project. If you then train the model, Amazon Lookout for Vision internally splits the remaining dataset into a training and test dataset. If you delete the training dataset, you must create a training dataset before you can create a model. It might take a while to delete the dataset. To check the current status, check the Status field in the response from a call to DescribeDataset.
 */
deleteDataset(callback?: (err: AWSError, data: LookoutVision.Types.DeleteDatasetResponse) => void): Request<LookoutVision.Types.DeleteDatasetResponse, AWSError>;
/**
 * Deletes an Amazon Lookout for Vision model. You can't delete a running model. To stop a running model, use the StopModel operation.
 */
deleteModel(params: LookoutVision.Types.DeleteModelRequest, callback?: (err: AWSError, data: LookoutVision.Types.DeleteModelResponse) => void): Request<LookoutVision.Types.DeleteModelResponse, AWSError>;
/**
 * Deletes an Amazon Lookout for Vision model. You can't delete a running model. To stop a running model, use the StopModel operation.
 */
deleteModel(callback?: (err: AWSError, data: LookoutVision.Types.DeleteModelResponse) => void): Request<LookoutVision.Types.DeleteModelResponse, AWSError>;
/**
 * Deletes an Amazon Lookout for Vision project. To delete a project, you must first delete each version of the model associated with the project. To delete a model use the DeleteModel operation. The training and test datasets are deleted automatically for you. The images referenced by the training and test datasets aren't deleted.
 */
deleteProject(params: LookoutVision.Types.DeleteProjectRequest, callback?: (err: AWSError, data: LookoutVision.Types.DeleteProjectResponse) => void): Request<LookoutVision.Types.DeleteProjectResponse, AWSError>;
/**
 * Deletes an Amazon Lookout for Vision project. To delete a project, you must first delete each version of the model associated with the project. To delete a model use the DeleteModel operation. The training and test datasets are deleted automatically for you. The images referenced by the training and test datasets aren't deleted.
 */
deleteProject(callback?: (err: AWSError, data: LookoutVision.Types.DeleteProjectResponse) => void): Request<LookoutVision.Types.DeleteProjectResponse, AWSError>;
/**
 * Describe an Amazon Lookout for Vision dataset.
 */
describeDataset(params: LookoutVision.Types.DescribeDatasetRequest, callback?: (err: AWSError, data: LookoutVision.Types.DescribeDatasetResponse) => void): Request<LookoutVision.Types.DescribeDatasetResponse, AWSError>;
/**
 * Describe an Amazon Lookout for Vision dataset.
 */
describeDataset(callback?: (err: AWSError, data: LookoutVision.Types.DescribeDatasetResponse) => void): Request<LookoutVision.Types.DescribeDatasetResponse, AWSError>;
/**
 * Describes a version of an Amazon Lookout for Vision model.
 */
describeModel(params: LookoutVision.Types.DescribeModelRequest, callback?: (err: AWSError, data: LookoutVision.Types.DescribeModelResponse) => void): Request<LookoutVision.Types.DescribeModelResponse, AWSError>;
/**
 * Describes a version of an Amazon Lookout for Vision model.
 */
describeModel(callback?: (err: AWSError, data: LookoutVision.Types.DescribeModelResponse) => void): Request<LookoutVision.Types.DescribeModelResponse, AWSError>;
/**
 * Describes an Amazon Lookout for Vision project.
 */
describeProject(params: LookoutVision.Types.DescribeProjectRequest, callback?: (err: AWSError, data: LookoutVision.Types.DescribeProjectResponse) => void): Request<LookoutVision.Types.DescribeProjectResponse, AWSError>;
/**
 * Describes an Amazon Lookout for Vision project.
 */
describeProject(callback?: (err: AWSError, data: LookoutVision.Types.DescribeProjectResponse) => void): Request<LookoutVision.Types.DescribeProjectResponse, AWSError>;
/**
 * Detects anomalies in an image that you supply. The response from DetectAnomalies includes a boolean prediction that the image contains one or more anomalies and a confidence value for the prediction. Before calling DetectAnomalies, you must first start your model with the StartModel operation. You are charged for the amount of time, in minutes, that a model runs and for the number of anomaly detection units that your model uses. If you are not using a model, use the StopModel operation to stop your model.
 */
detectAnomalies(params: LookoutVision.Types.DetectAnomaliesRequest, callback?: (err: AWSError, data: LookoutVision.Types.DetectAnomaliesResponse) => void): Request<LookoutVision.Types.DetectAnomaliesResponse, AWSError>;
/**
 * Detects anomalies in an image that you supply. The response from DetectAnomalies includes a boolean prediction that the image contains one or more anomalies and a confidence value for the prediction. Before calling DetectAnomalies, you must first start your model with the StartModel operation. You are charged for the amount of time, in minutes, that a model runs and for the number of anomaly detection units that your model uses. If you are not using a model, use the StopModel operation to stop your model.
 */
detectAnomalies(callback?: (err: AWSError, data: LookoutVision.Types.DetectAnomaliesResponse) => void): Request<LookoutVision.Types.DetectAnomaliesResponse, AWSError>;
/**
 * Lists the JSON Lines within a dataset. An Amazon Lookout for Vision JSON Line contains the anomaly information for a single image, including the image location and the assigned label.
 */
listDatasetEntries(params: LookoutVision.Types.ListDatasetEntriesRequest, callback?: (err: AWSError, data: LookoutVision.Types.ListDatasetEntriesResponse) => void): Request<LookoutVision.Types.ListDatasetEntriesResponse, AWSError>;
/**
 * Lists the JSON Lines within a dataset. An Amazon Lookout for Vision JSON Line contains the anomaly information for a single image, including the image location and the assigned label.
 */
listDatasetEntries(callback?: (err: AWSError, data: LookoutVision.Types.ListDatasetEntriesResponse) => void): Request<LookoutVision.Types.ListDatasetEntriesResponse, AWSError>;
/**
 * Lists the versions of a model in an Amazon Lookout for Vision project.
 */
listModels(params: LookoutVision.Types.ListModelsRequest, callback?: (err: AWSError, data: LookoutVision.Types.ListModelsResponse) => void): Request<LookoutVision.Types.ListModelsResponse, AWSError>;
/**
 * Lists the versions of a model in an Amazon Lookout for Vision project.
 */
listModels(callback?: (err: AWSError, data: LookoutVision.Types.ListModelsResponse) => void): Request<LookoutVision.Types.ListModelsResponse, AWSError>;
/**
 * Lists the Amazon Lookout for Vision projects in your AWS account.
 */
listProjects(params: LookoutVision.Types.ListProjectsRequest, callback?: (err: AWSError, data: LookoutVision.Types.ListProjectsResponse) => void): Request<LookoutVision.Types.ListProjectsResponse, AWSError>;
/**
 * Lists the Amazon Lookout for Vision projects in your AWS account.
 */
listProjects(callback?: (err: AWSError, data: LookoutVision.Types.ListProjectsResponse) => void): Request<LookoutVision.Types.ListProjectsResponse, AWSError>;
/**
 * Starts the running of the version of an Amazon Lookout for Vision model. Starting a model takes a while to complete. To check the current state of the model, use DescribeModel. Once the model is running, you can detect custom labels in new images by calling DetectAnomalies. You are charged for the amount of time that the model is running. To stop a running model, call StopModel.
 */
startModel(params: LookoutVision.Types.StartModelRequest, callback?: (err: AWSError, data: LookoutVision.Types.StartModelResponse) => void): Request<LookoutVision.Types.StartModelResponse, AWSError>;
/**
 * Starts the running of the version of an Amazon Lookout for Vision model. Starting a model takes a while to complete. To check the current state of the model, use DescribeModel. Once the model is running, you can detect custom labels in new images by calling DetectAnomalies. You are charged for the amount of time that the model is running. To stop a running model, call StopModel.
 */
startModel(callback?: (err: AWSError, data: LookoutVision.Types.StartModelResponse) => void): Request<LookoutVision.Types.StartModelResponse, AWSError>;
/**
 * Stops a running model. The operation might take a while to complete. To check the current status, call DescribeModel.
 */
stopModel(params: LookoutVision.Types.StopModelRequest, callback?: (err: AWSError, data: LookoutVision.Types.StopModelResponse) => void): Request<LookoutVision.Types.StopModelResponse, AWSError>;
/**
 * Stops a running model. The operation might take a while to complete. To check the current status, call DescribeModel.
 */
stopModel(callback?: (err: AWSError, data: LookoutVision.Types.StopModelResponse) => void): Request<LookoutVision.Types.StopModelResponse, AWSError>;
/**
 * Adds one or more JSON Line entries to a dataset. A JSON Line includes information about an image used for training or testing an Amazon Lookout for Vision model. Updating a dataset might take a while to complete. To check the current status, call DescribeDataset and check the Status field in the response.
 */
updateDatasetEntries(params: LookoutVision.Types.UpdateDatasetEntriesRequest, callback?: (err: AWSError, data: LookoutVision.Types.UpdateDatasetEntriesResponse) => void): Request<LookoutVision.Types.UpdateDatasetEntriesResponse, AWSError>;
/**
 * Adds one or more JSON Line entries to a dataset. A JSON Line includes information about an image used for training or testing an Amazon Lookout for Vision model. Updating a dataset might take a while to complete. To check the current status, call DescribeDataset and check the Status field in the response.
 */
updateDatasetEntries(callback?: (err: AWSError, data: LookoutVision.Types.UpdateDatasetEntriesResponse) => void): Request<LookoutVision.Types.UpdateDatasetEntriesResponse, AWSError>;
}
declare namespace LookoutVision {
export type AnomalyClassFilter = string;
export type Boolean = boolean;
export type ClientToken = string;
export type ContentType = string;
export interface CreateDatasetRequest {
/**
* The name of the project in which you want to create a dataset.
*/
ProjectName: ProjectName;
/**
* The type of the dataset. Specify train for a training dataset. Specify test for a test dataset.
*/
DatasetType: DatasetType;
/**
* The location of the manifest file that Amazon Lookout for Vision uses to create the dataset. If you don't specify DatasetSource, an empty dataset is created and the operation synchronously returns. Later, you can add JSON Lines by calling UpdateDatasetEntries. If you specify a value for DataSource, the manifest at the S3 location is validated and used to create the dataset. The call to CreateDataset is asynchronous and might take a while to complete. To find out the current status, Check the value of Status returned in a call to DescribeDataset.
*/
DatasetSource?: DatasetSource;
/**
* ClientToken is an idempotency token that ensures a call to CreateDataset completes only once. You choose the value to pass. For example, An issue, such as an network outage, might prevent you from getting a response from CreateDataset. In this case, safely retry your call to CreateDataset by using the same ClientToken parameter value. An error occurs if the other input parameters are not the same as in the first request. Using a different value for ClientToken is considered a new call to CreateDataset. An idempotency token is active for 8 hours.
*/
ClientToken?: ClientToken;
}
export interface CreateDatasetResponse {
/**
* Information about the dataset.
*/
DatasetMetadata?: DatasetMetadata;
}
export interface CreateModelRequest {
/**
* The name of the project in which you want to create a model version.
*/
ProjectName: ProjectName;
/**
* A description for the version of the model.
*/
Description?: ModelDescription;
/**
* ClientToken is an idempotency token that ensures a call to CreateModel completes only once. You choose the value to pass. For example, An issue, such as an network outage, might prevent you from getting a response from CreateModel. In this case, safely retry your call to CreateModel by using the same ClientToken parameter value. An error occurs if the other input parameters are not the same as in the first request. Using a different value for ClientToken is considered a new call to CreateModel. An idempotency token is active for 8 hours.
*/
ClientToken?: ClientToken;
/**
* The location where Amazon Lookout for Vision saves the training results.
*/
OutputConfig: OutputConfig;
/**
* The identifier of the AWS Key Management Service (AWS KMS) customer master key (CMK) to use for encypting the model. If this parameter is not specified, the model is encrypted by a key that AWS owns and manages.
*/
KmsKeyId?: KmsKeyId;
}
export interface CreateModelResponse {
/**
* The response from a call to CreateModel.
*/
ModelMetadata?: ModelMetadata;
}
export interface CreateProjectRequest {
/**
* A name for the project.
*/
ProjectName: ProjectName;
/**
* ClientToken is an idempotency token that ensures a call to CreateProject completes only once. You choose the value to pass. For example, an issue, such as a network outage, might prevent you from getting a response from CreateProject. In this case, safely retry your call to CreateProject by using the same ClientToken parameter value. An error occurs if the other input parameters are not the same as in the first request. Using a different value for ClientToken is considered a new call to CreateProject. An idempotency token is active for 8 hours.
*/
ClientToken?: ClientToken;
}
export interface CreateProjectResponse {
/**
* Information about the project.
*/
ProjectMetadata?: ProjectMetadata;
}
export type DatasetChanges = Buffer|Uint8Array|Blob|string;
export interface DatasetDescription {
/**
* The name of the project that contains the dataset.
*/
ProjectName?: ProjectName;
/**
* The type of the dataset. The value train represents a training dataset or single dataset project. The value test represents a test dataset.
*/
DatasetType?: DatasetType;
/**
* The Unix timestamp for the time and date that the dataset was created.
*/
CreationTimestamp?: DateTime;
/**
* The Unix timestamp for the date and time that the dataset was last updated.
*/
LastUpdatedTimestamp?: DateTime;
/**
* The status of the dataset.
*/
Status?: DatasetStatus;
/**
* The status message for the dataset.
*/
StatusMessage?: DatasetStatusMessage;
/**
* Statistics about the images in the dataset (totals of labeled, normal, and anomalous images — see DatasetImageStats).
*/
ImageStats?: DatasetImageStats;
}
export type DatasetEntry = string;
export type DatasetEntryList = DatasetEntry[];
export interface DatasetGroundTruthManifest {
/**
* The S3 bucket location for the manifest file.
*/
S3Object?: InputS3Object;
}
export interface DatasetImageStats {
/**
* The total number of images in the dataset.
*/
Total?: Integer;
/**
* The total number of labeled images.
*/
Labeled?: Integer;
/**
* The total number of images labeled as normal.
*/
Normal?: Integer;
/**
* The total number of images labeled as an anomaly.
*/
Anomaly?: Integer;
}
export interface DatasetMetadata {
/**
* The type of the dataset.
*/
DatasetType?: DatasetType;
/**
* The Unix timestamp for the date and time that the dataset was created.
*/
CreationTimestamp?: DateTime;
/**
* The status for the dataset.
*/
Status?: DatasetStatus;
/**
* The status message for the dataset.
*/
StatusMessage?: DatasetStatusMessage;
}
export type DatasetMetadataList = DatasetMetadata[];
export interface DatasetSource {
/**
* Location information for the manifest file.
*/
GroundTruthManifest?: DatasetGroundTruthManifest;
}
export type DatasetStatus = "CREATE_IN_PROGRESS"|"CREATE_COMPLETE"|"CREATE_FAILED"|"UPDATE_IN_PROGRESS"|"UPDATE_COMPLETE"|"UPDATE_FAILED_ROLLBACK_IN_PROGRESS"|"UPDATE_FAILED_ROLLBACK_COMPLETE"|"DELETE_IN_PROGRESS"|"DELETE_COMPLETE"|"DELETE_FAILED"|string;
export type DatasetStatusMessage = string;
export type DatasetType = string;
export type DateTime = Date;
export interface DeleteDatasetRequest {
/**
* The name of the project that contains the dataset that you want to delete.
*/
ProjectName: ProjectName;
/**
* The type of the dataset to delete. Specify train to delete the training dataset. Specify test to delete the test dataset. To delete the dataset in a single dataset project, specify train.
*/
DatasetType: DatasetType;
/**
* ClientToken is an idempotency token that ensures a call to DeleteDataset completes only once. You choose the value to pass. For example, An issue, such as an network outage, might prevent you from getting a response from DeleteDataset. In this case, safely retry your call to DeleteDataset by using the same ClientToken parameter value. An error occurs if the other input parameters are not the same as in the first request. Using a different value for ClientToken is considered a new call to DeleteDataset. An idempotency token is active for 8 hours.
*/
ClientToken?: ClientToken;
}
export interface DeleteDatasetResponse {
}
export interface DeleteModelRequest {
/**
* The name of the project that contains the model that you want to delete.
*/
ProjectName: ProjectName;
/**
* The version of the model that you want to delete.
*/
ModelVersion: ModelVersion;
/**
* ClientToken is an idempotency token that ensures a call to DeleteModel completes only once. You choose the value to pass. For example, An issue, such as an network outage, might prevent you from getting a response from DeleteModel. In this case, safely retry your call to DeleteModel by using the same ClientToken parameter value. An error occurs if the other input parameters are not the same as in the first request. Using a different value for ClientToken is considered a new call to DeleteModel. An idempotency token is active for 8 hours.
*/
ClientToken?: ClientToken;
}
export interface DeleteModelResponse {
/**
* The Amazon Resource Name (ARN) of the model that was deleted.
*/
ModelArn?: ModelArn;
}
export interface DeleteProjectRequest {
/**
* The name of the project to delete.
*/
ProjectName: ProjectName;
/**
* ClientToken is an idempotency token that ensures a call to DeleteProject completes only once. You choose the value to pass. For example, An issue, such as an network outage, might prevent you from getting a response from DeleteProject. In this case, safely retry your call to DeleteProject by using the same ClientToken parameter value. An error occurs if the other input parameters are not the same as in the first request. Using a different value for ClientToken is considered a new call to DeleteProject. An idempotency token is active for 8 hours.
*/
ClientToken?: ClientToken;
}
export interface DeleteProjectResponse {
/**
* The Amazon Resource Name (ARN) of the project that was deleted.
*/
ProjectArn?: ProjectArn;
}
export interface DescribeDatasetRequest {
/**
* The name of the project that contains the dataset that you want to describe.
*/
ProjectName: ProjectName;
/**
* The type of the dataset to describe. Specify train to describe the training dataset. Specify test to describe the test dataset. If you have a single dataset project, specify train
*/
DatasetType: DatasetType;
}
export interface DescribeDatasetResponse {
/**
* The description of the requested dataset.
*/
DatasetDescription?: DatasetDescription;
}
export interface DescribeModelRequest {
/**
* The project that contains the version of a model that you want to describe.
*/
ProjectName: ProjectName;
/**
* The version of the model that you want to describe.
*/
ModelVersion: ModelVersion;
}
export interface DescribeModelResponse {
/**
* Contains the description of the model.
*/
ModelDescription?: ModelDescription;
}
export interface DescribeProjectRequest {
/**
* The name of the project that you want to describe.
*/
ProjectName: ProjectName;
}
export interface DescribeProjectResponse {
/**
* The description of the project.
*/
ProjectDescription?: ProjectDescription;
}
export interface DetectAnomaliesRequest {
/**
* The name of the project that contains the model version that you want to use.
*/
ProjectName: ProjectName;
/**
* The version of the model that you want to use.
*/
ModelVersion: ModelVersion;
/**
* The unencrypted image bytes that you want to analyze.
*/
Body: Stream;
/**
* The type of the image passed in Body. Valid values are image/png (PNG format images) and image/jpeg (JPG format images).
*/
ContentType: ContentType;
}
export interface DetectAnomaliesResponse {
/**
* The results of the DetectAnomalies operation.
*/
DetectAnomalyResult?: DetectAnomalyResult;
}
export interface DetectAnomalyResult {
/**
* The source of the image that was analyzed. direct means that the image was supplied from the local computer. No other values are supported.
*/
Source?: ImageSource;
/**
* True if the image contains an anomaly, otherwise false.
*/
IsAnomalous?: Boolean;
/**
* The confidence that Amazon Lookout for Vision has in the accuracy of the prediction.
*/
Confidence?: Float;
}
export type Float = number;
export interface ImageSource {
/**
* The type of the image.
*/
Type?: ImageSourceType;
}
export type ImageSourceType = string;
export type InferenceUnits = number;
export interface InputS3Object {
/**
* The Amazon S3 bucket that contains the manifest.
*/
Bucket: S3BucketName;
/**
* The name and location of the manifest file within the bucket.
*/
Key: S3ObjectKey;
/**
* The version ID of the bucket.
*/
VersionId?: S3ObjectVersion;
}
export type Integer = number;
export type IsLabeled = boolean;
export type KmsKeyId = string;
export interface ListDatasetEntriesRequest {
/**
* The name of the project that contains the dataset that you want to list.
*/
ProjectName: ProjectName;
/**
* The type of the dataset that you want to list. Specify train to list the training dataset. Specify test to list the test dataset. If you have a single dataset project, specify train.
*/
DatasetType: DatasetType;
/**
* Specify true to include labeled entries, otherwise specify false. If you don't specify a value, Lookout for Vision returns all entries.
*/
Labeled?: IsLabeled;
/**
* Specify normal to include only normal images. Specify anomaly to only include anomalous entries. If you don't specify a value, Amazon Lookout for Vision returns normal and anomalous images.
*/
AnomalyClass?: AnomalyClassFilter;
/**
* Only includes entries before the specified date in the response. For example, 2020-06-23T00:00:00.
*/
BeforeCreationDate?: DateTime;
/**
* Only includes entries after the specified date in the response. For example, 2020-06-23T00:00:00.
*/
AfterCreationDate?: DateTime;
/**
* If the previous response was incomplete (because there is more data to retrieve), Amazon Lookout for Vision returns a pagination token in the response. You can use this pagination token to retrieve the next set of dataset entries.
*/
NextToken?: PaginationToken;
/**
* The maximum number of results to return per paginated call. The largest value you can specify is 100. If you specify a value greater than 100, a ValidationException error occurs. The default value is 100.
*/
MaxResults?: PageSize;
/**
* Perform a "contains" search on the values of the source-ref key within the dataset. For example a value of "IMG_17" returns all JSON Lines where the source-ref key value matches *IMG_17*.
*/
SourceRefContains?: QueryString;
}
export interface ListDatasetEntriesResponse {
/**
* A list of the entries (JSON Lines) within the dataset.
*/
DatasetEntries?: DatasetEntryList;
/**
* If the response is truncated, Amazon Lookout for Vision returns this token that you can use in the subsequent request to retrieve the next set of dataset entries.
*/
NextToken?: PaginationToken;
}
export interface ListModelsRequest {
/**
* The name of the project that contains the model versions that you want to list.
*/
ProjectName: ProjectName;
/**
* If the previous response was incomplete (because there is more data to retrieve), Amazon Lookout for Vision returns a pagination token in the response. You can use this pagination token to retrieve the next set of models.
*/
NextToken?: PaginationToken;
/**
* The maximum number of results to return per paginated call. The largest value you can specify is 100. If you specify a value greater than 100, a ValidationException error occurs. The default value is 100.
*/
MaxResults?: PageSize;
}
export interface ListModelsResponse {
/**
* A list of model versions in the specified project.
*/
Models?: ModelMetadataList;
/**
* If the response is truncated, Amazon Lookout for Vision returns this token that you can use in the subsequent request to retrieve the next set of models.
*/
NextToken?: PaginationToken;
}
export interface ListProjectsRequest {
/**
* If the previous response was incomplete (because there is more data to retrieve), Amazon Lookout for Vision returns a pagination token in the response. You can use this pagination token to retrieve the next set of projects.
*/
NextToken?: PaginationToken;
/**
* The maximum number of results to return per paginated call. The largest value you can specify is 100. If you specify a value greater than 100, a ValidationException error occurs. The default value is 100.
*/
MaxResults?: PageSize;
}
export interface ListProjectsResponse {
/**
* A list of projects in your AWS account.
*/
Projects?: ProjectMetadataList;
/**
* If the response is truncated, Amazon Lookout for Vision returns this token that you can use in the subsequent request to retrieve the next set of projects.
*/
NextToken?: PaginationToken;
}
export type ModelArn = string;
export interface ModelDescription {
/**
* The version of the model
*/
ModelVersion?: ModelVersion;
/**
* The Amazon Resource Name (ARN) of the model.
*/
ModelArn?: ModelArn;
/**
* The Unix timestamp for the date and time that the model was created.
*/
CreationTimestamp?: DateTime;
/**
* The description for the model.
*/
Description?: ModelDescriptionMessage;
/**
* The status of the model.
*/
Status?: ModelStatus;
/**
* The status message for the model.
*/
StatusMessage?: ModelStatusMessage;
/**
* Performance metrics for the model. Created during training.
*/
Performance?: ModelPerformance;
/**
* The S3 location where Amazon Lookout for Vision saves model training files.
*/
OutputConfig?: OutputConfig;
/**
* The S3 location where Amazon Lookout for Vision saves the manifest file that was used to test the trained model and generate the performance scores.
*/
EvaluationManifest?: OutputS3Object;
/**
* The S3 location where Amazon Lookout for Vision saves the performance metrics.
*/
EvaluationResult?: OutputS3Object;
/**
* The Unix timestamp for the date and time that the evaluation ended.
*/
EvaluationEndTimestamp?: DateTime;
/**
* The identifier for the AWS Key Management Service (AWS KMS) key that was used to encrypt the model during training.
*/
KmsKeyId?: KmsKeyId;
}
export type ModelDescriptionMessage = string;
export type ModelHostingStatus = "RUNNING"|"STARTING"|"STOPPED"|"FAILED"|string;
export interface ModelMetadata {
/**
* The unix timestamp for the date and time that the model was created.
*/
CreationTimestamp?: DateTime;
/**
* The version of the model.
*/
ModelVersion?: ModelVersion;
/**
* The Amazon Resource Name (ARN) of the model.
*/
ModelArn?: ModelArn;
/**
* The description for the model.
*/
Description?: ModelDescriptionMessage;
/**
* The status of the model.
*/
Status?: ModelStatus;
/**
* The status message for the model.
*/
StatusMessage?: ModelStatusMessage;
/**
* Performance metrics for the model. Created during training.
*/
Performance?: ModelPerformance;
}
export type ModelMetadataList = ModelMetadata[];
export interface ModelPerformance {
/**
* The overall F1 score metric for the trained model.
*/
F1Score?: Float;
/**
* The overall recall metric value for the trained model.
*/
Recall?: Float;
/**
* The overall precision metric value for the trained model.
*/
Precision?: Float;
}
export type ModelStatus = "TRAINING"|"TRAINED"|"TRAINING_FAILED"|"STARTING_HOSTING"|"HOSTED"|"HOSTING_FAILED"|"STOPPING_HOSTING"|"SYSTEM_UPDATING"|"DELETING"|string;
export type ModelStatusMessage = string;
export type ModelVersion = string;
export interface OutputConfig {
/**
* The S3 location for the output.
*/
S3Location: S3Location;
}
export interface OutputS3Object {
/**
* The bucket that contains the training output.
*/
Bucket: S3BucketName;
/**
* The location of the training output in the bucket.
*/
Key: S3ObjectKey;
}
export type PageSize = number;
export type PaginationToken = string;
export type ProjectArn = string;
export interface ProjectDescription {
/**
* The Amazon Resource Name (ARN) of the project.
*/
ProjectArn?: ProjectArn;
/**
* The name of the project.
*/
ProjectName?: ProjectName;
/**
* The unix timestamp for the date and time that the project was created.
*/
CreationTimestamp?: DateTime;
/**
* A list of datasets in the project.
*/
Datasets?: DatasetMetadataList;
}
export interface ProjectMetadata {
/**
* The Amazon Resource Name (ARN) of the project.
*/
ProjectArn?: ProjectArn;
/**
* The name of the project.
*/
ProjectName?: ProjectName;
/**
* The unix timestamp for the date and time that the project was created.
*/
CreationTimestamp?: DateTime;
}
export type ProjectMetadataList = ProjectMetadata[];
export type ProjectName = string;
export type QueryString = string;
export type S3BucketName = string;
export type S3KeyPrefix = string;
export interface S3Location {
/**
* The S3 bucket that contains the manifest file.
*/
Bucket: S3BucketName;
/**
* The path and name of the manifest file within the S3 bucket.
*/
Prefix?: S3KeyPrefix;
}
export type S3ObjectKey = string;
export type S3ObjectVersion = string;
export interface StartModelRequest {
/**
* The name of the project that contains the model that you want to start.
*/
ProjectName: ProjectName;
/**
* The version of the model that you want to start.
*/
ModelVersion: ModelVersion;
/**
* The minimum number of inference units to use. A single inference unit represents 1 hour of processing and can support up to 5 transactions per second (TPS). Use a higher number to increase the TPS throughput of your model. You are charged for the number of inference units that you use.
*/
MinInferenceUnits: InferenceUnits;
/**
* ClientToken is an idempotency token that ensures a call to StartModel completes only once. You choose the value to pass. For example, an issue, such as a network outage, might prevent you from getting a response from StartModel. In this case, safely retry your call to StartModel by using the same ClientToken parameter value. An error occurs if the other input parameters are not the same as in the first request. Using a different value for ClientToken is considered a new call to StartModel. An idempotency token is active for 8 hours.
*/
ClientToken?: ClientToken;
}
export interface StartModelResponse {
/**
* The current running status of the model.
*/
Status?: ModelHostingStatus;
}
export interface StopModelRequest {
/**
* The name of the project that contains the model that you want to stop.
*/
ProjectName: ProjectName;
/**
* The version of the model that you want to stop.
*/
ModelVersion: ModelVersion;
/**
* ClientToken is an idempotency token that ensures a call to StopModel completes only once. You choose the value to pass. For example, An issue, such as an network outage, might prevent you from getting a response from StopModel. In this case, safely retry your call to StopModel by using the same ClientToken parameter value. An error occurs if the other input parameters are not the same as in the first request. Using a different value for ClientToken is considered a new call to StopModel. An idempotency token is active for 8 hours.
*/
ClientToken?: ClientToken;
}
export interface StopModelResponse {
/**
* The status of the model.
*/
Status?: ModelHostingStatus;
}
export type Stream = Buffer|Uint8Array|Blob|string|Readable;
export interface UpdateDatasetEntriesRequest {
/**
* The name of the project that contains the dataset that you want to update.
*/
ProjectName: ProjectName;
/**
* The type of the dataset that you want to update. Specify train to update the training dataset. Specify test to update the test dataset. If you have a single dataset project, specify train.
*/
DatasetType: DatasetType;
/**
* The entries to add to the dataset.
*/
Changes: DatasetChanges;
/**
* ClientToken is an idempotency token that ensures a call to UpdateDatasetEntries completes only once. You choose the value to pass. For example, An issue, such as an network outage, might prevent you from getting a response from UpdateDatasetEntries. In this case, safely retry your call to UpdateDatasetEntries by using the same ClientToken parameter value. An error occurs if the other input parameters are not the same as in the first request. Using a different value for ClientToken is considered a new call to UpdateDatasetEntries. An idempotency token is active for 8 hours.
*/
ClientToken?: ClientToken;
}
export interface UpdateDatasetEntriesResponse {
/**
* The status of the dataset update.
*/
Status?: DatasetStatus;
}
/**
* A string in YYYY-MM-DD format that represents the latest possible API version that can be used in this service. Specify 'latest' to use the latest possible version.
*/
export type apiVersion = "2020-11-20"|"latest"|string;
export interface ClientApiVersions {
/**
* A string in YYYY-MM-DD format that represents the latest possible API version that can be used in this service. Specify 'latest' to use the latest possible version.
*/
apiVersion?: apiVersion;
}
export type ClientConfiguration = ServiceConfigurationOptions & ClientApiVersions;
/**
* Contains interfaces for use with the LookoutVision client.
*/
export import Types = LookoutVision;
}
export = LookoutVision;
import { assert } from "chai";
import * as path from "path";
import * as fs from "fs-extra";
import * as os from "os";
import * as semver from "semver";
import {PluginManager, IPluginInfo} from "../dist/index";
describe("PluginManager:", function() {
this.timeout(15000);
this.slow(3000);
let manager: PluginManager;
beforeEach(async function() {
// Fresh manager per test; github authentication (when configured) raises the
// API rate limit for the "from github" suite.
manager = new PluginManager({
githubAuthentication: getGithubAuth()
});
// sanity check to see if the pluginsPath is what we expect to be
if (manager.options.pluginsPath !== path.join(__dirname, "../plugin_packages")) {
throw new Error("Invalid plugins path " + manager.options.pluginsPath);
}
// start every test from an empty plugin directory
await fs.remove(manager.options.pluginsPath);
});
afterEach(async function() {
await fs.remove(manager.options.pluginsPath);
});
describe("installation", function() {
it("initially should not have any plugins", async function() {
    // A freshly constructed manager must report an empty plugin list and
    // no installed package of any kind.
    const installed = await manager.list();
    assert.lengthOf(installed, 0);
    for (const pkg of ["cookie", "moment", "my-basic-plugin"]) {
        assert.isUndefined(manager.alreadyInstalled(pkg));
    }
});
it("initially cannot require any plugins", async function() {
    // A module counts as reachable if either the plugin manager or the
    // host's own require can resolve it.
    const isAvailable = (name: string): boolean => {
        try {
            manager.require(name);
            return true;
        } catch (e) {
            try {
                require(name);
                return true;
            } catch (e) {
                return false;
            }
        }
    };
    assert.isFalse(isAvailable("cookie"), "cookie should not be available");
    assert.isFalse(isAvailable("moment"), "moment should not be available");
    assert.isFalse(isAvailable("my-basic-plugin"), "my-basic-plugin should not be available");
});
describe("from path", function() {
it("installing a not existing plugin", async function() {
    // installFromPath must reject when the given path does not exist.
    let rejected = false;
    try {
        await manager.installFromPath("/this/path/does-not-exists");
    } catch (e) {
        rejected = true;
    }
    if (!rejected) {
        throw new Error("Expected to fail");
    }
});
it("installing a plugin", async function() {
const pluginPath = path.join(__dirname, "my-basic-plugin");
await manager.installFromPath(pluginPath);
const pluginInstance = manager.require("my-basic-plugin");
assert.isDefined(pluginInstance, "Plugin is not loaded");
assert.equal(pluginInstance.myVariable, "value1");
});
it("installing a plugin with a special name", async function() {
// name with dot (.)
const pluginPath = path.join(__dirname, "my-plugin.js");
await manager.installFromPath(pluginPath);
const pluginInstance = manager.require("my-plugin.js");
assert.isDefined(pluginInstance, "my-plugin.js!");
});
it("installing a plugin 2 times doesn't have effect", async function() {
    // A second install of the same path must be a no-op: the exact same
    // loaded instance (and its installDate) is returned.
    const pluginPath = path.join(__dirname, "my-basic-plugin");
    await manager.installFromPath(pluginPath);
    const firstInstance = manager.require("my-basic-plugin");
    await manager.installFromPath(pluginPath);
    const secondInstance = manager.require("my-basic-plugin");
    assert.equal(firstInstance, secondInstance);
    assert.equal(firstInstance.installDate, secondInstance.installDate);
});
it("installing a plugin 2 times with force options allow to force a reinstallation", async function() {
const pluginPath = path.join(__dirname, "my-basic-plugin");
await manager.installFromPath(pluginPath);
const pluginInstance = manager.require("my-basic-plugin");
await manager.installFromPath(pluginPath, {force: true});
const pluginInstance2 = manager.require("my-basic-plugin");
assert.notEqual(pluginInstance, pluginInstance2);
assert.notEqual(pluginInstance.installDate, pluginInstance2.installDate);
});
it("installing a plugin with minimal info", async function() {
const pluginPath = path.join(__dirname, "my-minimal-plugin");
await manager.installFromPath(pluginPath);
const pluginInstance = manager.require("my-minimal-plugin");
assert.isDefined(pluginInstance, "Plugin is not loaded");
assert.equal(pluginInstance.myVariable, "value1");
});
it("installing a plugin with node_modules should not copy node_modules", async function() {
const pluginPath = path.join(__dirname, "my-plugin-with-npm-modules");
await manager.installFromPath(pluginPath);
const pluginInstance = manager.require("my-plugin-with-npm-modules");
assert.isDefined(pluginInstance, "Plugin is not loaded");
assert.equal(pluginInstance.myVariable, "value1");
const pluginDestinationPath = path.join(manager.options.pluginsPath, "my-plugin-with-npm-modules");
assert.isTrue(fs.existsSync(pluginDestinationPath),
"Plugin directory should be copied");
assert.isFalse(fs.existsSync(path.join(pluginDestinationPath, "node_modules")),
"Directory node_modules should not be copied");
});
});
describe("from npm", function() {
it("installing from a not valid npm url", async function() {
// NOTE(review): registry host is intentionally unreachable so the install
// must fail — presumably chosen to never resolve; confirm it stays invalid.
manager = new PluginManager({
npmRegistryUrl: "http://davide.icardi.org/some-not-existing-registry/"
});
try {
await manager.installFromNpm("moment");
} catch (e) {
// expected: install against an invalid registry rejects
return;
}
throw new Error("Expected to throw");
});
it("installing a not existing plugin", async function() {
try {
await manager.installFromNpm("this-does-not-exists", "9.9.9");
} catch (e) {
return;
}
throw new Error("Expected to throw");
});
it("installing a plugin (cookie)", async function() {
await manager.installFromNpm("cookie", "0.3.1");
const cookie = manager.require("cookie");
assert.isDefined(cookie, "Plugin is not loaded");
// try to use the plugin
const result = cookie.parse("foo=bar;x=y");
assert.equal(result.foo, "bar");
assert.equal(result.x, "y");
});
it("installing a plugin already present in the folder will succeeded also if npm is down", async function() {
// download it to ensure it is present
await manager.installFromNpm("cookie", "0.3.1");
const failedManager = new PluginManager({
npmRegistryUrl: "http://davideicardi.com/some-not-existing-registry/"
});
await failedManager.installFromNpm("cookie", "0.3.1");
const cookie = manager.require("cookie");
assert.isDefined(cookie, "Plugin is not loaded");
// try to use the plugin
const result = cookie.parse("foo=bar;x=y");
assert.equal(result.foo, "bar");
assert.equal(result.x, "y");
});
it("installing a plugin already present in the folder will fail if npm is down and noCache is used", async function() {
// download it to ensure it is present
await manager.installFromNpm("cookie", "0.3.1");
const failedManager = new PluginManager({
npmRegistryUrl: "http://davideicardi.com/some-not-existing-registry/",
npmInstallMode: "noCache"
});
try {
await failedManager.installFromNpm("cookie", "0.3.1");
} catch (e) {
return;
}
throw new Error("Expected to throw");
});
it("installing a plugin already present in the folder will fail if npm is down and I ask for latest", async function() {
// download it to ensure it is present
await manager.installFromNpm("cookie", "0.3.1");
const failedManager = new PluginManager({
npmRegistryUrl: "http://davideicardi.com/some-not-existing-registry/"
});
try {
await failedManager.installFromNpm("cookie");
} catch (e) {
return;
}
throw new Error("Expected to throw");
});
});
describe("from github", function() {
this.slow(4000);
it("api configuration", function() {
if (!manager.options.githubAuthentication) {
console.error("WARNING: No github_auth.json or github_auth_username env variable found, github api can give rate limits errors");
}
});
it("installing a not existing plugin", async function() {
try {
await manager.installFromGithub("this/doesnotexists");
} catch (e) {
return;
}
throw new Error("Expected to fail");
});
// NOTE: Initially I have tried with lodash but it doesn't have a valid structure
// (missing lodash.js, probably need a compilation)
it("installing a plugin from master branch (underscore)", async function() {
await manager.installFromGithub("jashkenas/underscore");
const _ = manager.require("underscore");
assert.isDefined(_, "Plugin is not loaded");
// try to use the plugin
const result = _.defaults({ a: 1 }, { a: 3, b: 2 });
assert.equal(result.a, 1);
assert.equal(result.b, 2);
});
it("installing a plugin from commit (underscore)", async function() {
const pluginInfo = await manager.installFromGithub("jashkenas/underscore#1aed9ec");
assert.equal(pluginInfo.version, "1.8.0");
const _ = manager.require("underscore");
assert.isDefined(_, "Plugin is not loaded");
// try to use the plugin
const result = _.defaults({ a: 1 }, { a: 3, b: 2 });
assert.equal(result.a, 1);
assert.equal(result.b, 2);
});
it("installing a plugin from tag (underscore)", async function() {
const pluginInfo = await manager.installFromGithub("jashkenas/underscore#1.8.0");
assert.equal(pluginInfo.version, "1.8.0");
const _ = manager.require("underscore");
assert.isDefined(_, "Plugin is not loaded");
// try to use the plugin
const result = _.defaults({ a: 1 }, { a: 3, b: 2 });
assert.equal(result.a, 1);
assert.equal(result.b, 2);
});
});
describe("from code", function() {
for (const invalidName of ["../test", ".\\test", "", undefined, null]) {
it(`installing a not valid plugin name "${invalidName}" is not supported`, async function() {
try {
const n = invalidName as any;
await manager.installFromNpm(n, "9.9.9");
} catch (e) {
return;
}
throw new Error("Expected to fail");
});
}
it("installing a plugin", async function() {
const code = `module.exports = "Hello from code plugin";`;
await manager.installFromCode("my-code-plugin", code);
const myPlugin = manager.require("my-code-plugin");
assert.isDefined(myPlugin, "Plugin is not loaded");
// try to use the plugin
assert.equal(myPlugin, "Hello from code plugin");
});
it("update a plugin", async function() {
const code = `module.exports = "Hello from code plugin";`;
await manager.installFromCode("my-code-plugin", code);
const myPlugin = manager.require("my-code-plugin");
assert.equal(myPlugin, "Hello from code plugin");
const codeV2 = `module.exports = "V2";`;
await manager.installFromCode("my-code-plugin", codeV2);
const myPluginV2 = manager.require("my-code-plugin");
assert.equal(myPluginV2, "V2");
});
it("uninstalling a plugin", async function() {
const code = `module.exports = "Hello from code plugin";`;
await manager.installFromCode("my-code-plugin", code);
const myPlugin = manager.require("my-code-plugin");
assert.equal(myPlugin, "Hello from code plugin");
await manager.uninstall("my-code-plugin");
try {
manager.require("my-code-plugin");
} catch (e) {
return;
}
throw new Error("Expected to fail");
});
describe("given a plugin with an unknown dependency", function() {
beforeEach(async function() {
const code = `module.exports = require("some-not-valid-dependency");`;
await manager.installFromCode("my-code-plugin", code);
});
it("should give an error on require", async function() {
try {
manager.require("my-code-plugin");
} catch (e) {
return;
}
throw new Error("Expected to fail");
});
it("after a failed require it shold fail also for next require", async function() {
// there was a bug that cache a failed plugin also on error
for (let i = 0; i < 10; i++) {
try {
manager.require("my-code-plugin");
} catch (e) {
continue;
}
throw new Error("Expected to fail");
}
});
});
});
});
describe("run script", function() {
// runScript evaluates a CommonJS-style snippet and returns its module.exports.
it("simple script", async function() {
const code = `
const a = 1;
const b = 3;
module.exports = a + b;
`;
const result = manager.runScript(code);
assert.equal(result, 4);
});
// regression: a trailing line comment must not break script evaluation
it("script with comment at the end", async function() {
const code = `
const a = 1;
const b = 3;
module.exports = a + b;
// some content`;
const result = manager.runScript(code);
assert.equal(result, 4);
});
// scripts can require node built-in modules (here: os)
it("require system module", async function() {
const code = `
const os = require("os");
module.exports = os.hostname();
`;
const result = manager.runScript(code);
assert.equal(result, os.hostname());
});
});
// End-to-end tests against a real npm installation (moment pinned to 2.18.1).
// NOTE(review): these tests hit the npm registry — they assume network access.
describe("given an installed plugin", function() {
    let pluginInfo: IPluginInfo;
    beforeEach(async function() {
        pluginInfo = await manager.installFromNpm("moment", "2.18.1");
    });
    it("alreadyInstalled function should respect semver", function() {
        assert.isDefined(manager.alreadyInstalled("moment"));
        assert.isDefined(manager.alreadyInstalled("moment", "2.18.1"));
        assert.isDefined(manager.alreadyInstalled("moment", "v2.18.1"));
        assert.isDefined(manager.alreadyInstalled("moment", "=2.18.1"));
        assert.isDefined(manager.alreadyInstalled("moment", ">=2.18.1"));
        assert.isDefined(manager.alreadyInstalled("moment", "^2.18.1"));
        assert.isDefined(manager.alreadyInstalled("moment", "^2.0.0"));
        assert.isDefined(manager.alreadyInstalled("moment", ">=1.0.0"));
        assert.isUndefined(manager.alreadyInstalled("moment", "2.17.0"));
        assert.isUndefined(manager.alreadyInstalled("moment", "2.19.0"));
        assert.isUndefined(manager.alreadyInstalled("moment", "3.0.0"));
        assert.isUndefined(manager.alreadyInstalled("moment", "=3.0.0"));
        assert.isUndefined(manager.alreadyInstalled("moment", "^3.0.0"));
    });
    it("alreadyInstalled function should support greater mode (for dependencies)", function() {
        assert.isDefined(manager.alreadyInstalled("moment", undefined, "satisfiesOrGreater"));
        assert.isDefined(manager.alreadyInstalled("moment", "2.18.1", "satisfiesOrGreater"));
        assert.isDefined(manager.alreadyInstalled("moment", "v2.18.1", "satisfiesOrGreater"));
        assert.isDefined(manager.alreadyInstalled("moment", "=2.18.1", "satisfiesOrGreater"));
        assert.isDefined(manager.alreadyInstalled("moment", ">=2.18.1", "satisfiesOrGreater"));
        assert.isDefined(manager.alreadyInstalled("moment", "^2.18.1", "satisfiesOrGreater"));
        assert.isDefined(manager.alreadyInstalled("moment", "^2.0.0", "satisfiesOrGreater"));
        assert.isDefined(manager.alreadyInstalled("moment", ">=1.0.0", "satisfiesOrGreater"));
        // this is considered installed with this mode
        assert.isDefined(manager.alreadyInstalled("moment", "2.17.0", "satisfiesOrGreater"));
        assert.isDefined(manager.alreadyInstalled("moment", "1.17.0", "satisfiesOrGreater"));
        assert.isDefined(manager.alreadyInstalled("moment", "^1.17.0", "satisfiesOrGreater"));
        assert.isUndefined(manager.alreadyInstalled("moment", "2.19.0", "satisfiesOrGreater"));
        assert.isUndefined(manager.alreadyInstalled("moment", "3.0.0", "satisfiesOrGreater"));
        assert.isUndefined(manager.alreadyInstalled("moment", "=3.0.0", "satisfiesOrGreater"));
        assert.isUndefined(manager.alreadyInstalled("moment", "^3.0.0", "satisfiesOrGreater"));
    });
    it("should be available", async function() {
        const plugins = await manager.list();
        assert.equal(plugins.length, 1);
        assert.equal(plugins[0].name, "moment");
        assert.equal(plugins[0].version, "2.18.1");
        assert.equal(plugins[0].location, path.join(manager.options.pluginsPath, "moment"));
        assert.isTrue(fs.existsSync(pluginInfo.location));
        const moment = manager.require("moment");
        assert.isDefined(moment, "Plugin is not loaded");
        assert.equal(manager.getInfo("moment"), pluginInfo);
    });
    it("require always return the same instance", async function() {
        // The loaded module must be cached: two requires give the same object.
        const instance1 = manager.require("moment");
        const instance2 = manager.require("moment");
        assert.equal(instance1, instance2);
    });
    it("dynamic script can require a plugin", async function() {
        const code = `
const m = require("moment");
module.exports = m;
`;
        const result = manager.runScript(code);
        const expectedInstance = manager.require("moment");
        assert.equal(expectedInstance, result);
    });
    it("code plugin can require another plugin", async function() {
        const code = `
const m = require("moment");
module.exports = m;
`;
        await manager.installFromCode("myplugin", code);
        const result = manager.require("myplugin");
        const expectedInstance = manager.require("moment");
        assert.equal(expectedInstance, result);
    });
    describe("when uninstalled", function() {
        beforeEach(async function() {
            await manager.uninstall("moment");
        });
        it("should not be available anymore", async function() {
            const plugins = await manager.list();
            assert.equal(plugins.length, 0);
            assert.isFalse(fs.existsSync(pluginInfo.location), "Directory still exits");
        });
        it("requiring a not installed plugin throw an error", async function() {
            try {
                manager.require("moment");
            } catch (e) {
                return;
            }
            throw new Error("Expected to fail");
        });
        it("directly requiring a not installed plugin throw an error", async function() {
            // Also the host's own require() must not see the uninstalled plugin.
            try {
                require("moment");
            } catch (e) {
                return;
            }
            throw new Error("Expected to fail");
        });
        it("requiring a not installed plugin using it's path throw an error", async function() {
            // Ensure that the plugin is really unloaded
            try {
                require(pluginInfo.location);
            } catch (e) {
                return;
            }
            throw new Error("Expected to fail");
        });
    });
});
// Cross-plugin resolution: plugin x can require plugin y once y is installed.
describe("given a plugin x that depend on y", function() {
    describe("when plugin y is installed", function() {
        beforeEach(async function() {
            await manager.installFromPath(path.join(__dirname, "my-plugin-y"));
        });
        it("when plugin x is installed can require plugin y", async function() {
            await manager.installFromPath(path.join(__dirname, "my-plugin-x"));
            const x = manager.require("my-plugin-x");
            assert.equal(x.y, "y!");
        });
        it("when plugin x is installed can require plugin y sub file", async function() {
            // Sub-file resolution ("my-plugin-y/subFile") must also work across plugins.
            await manager.installFromPath(path.join(__dirname, "my-plugin-x"));
            const x = manager.require("my-plugin-x");
            assert.equal(x.y_subFile, "y_subFile!");
        });
    });
});
// require() resolution semantics inside plugins must mirror Node.js:
// sub-folders, sub-files, index resolution, folder-as-main, circular refs.
// Fixed typos in test titles ("requre" -> "require", "wins" -> "win").
describe("require", function() {
    // TODO review this test, split it in microtest
    it("plugins respect the same node.js behavior", async function() {
        const pluginSourcePath = path.join(__dirname, "my-test-plugin");
        await manager.installFromPath(pluginSourcePath);
        const pluginInstance = manager.require("my-test-plugin");
        assert.isDefined(pluginInstance, "Plugin is not loaded");
        assert.equal(pluginInstance.myVariable, "value1");
        assert.equal(pluginInstance.myVariable2, "value2");
        assert.equal(pluginInstance.myVariableFromSubFile, "value3");
        assert.equal(pluginInstance.myVariableFromSubFolder, "value4");
        assert.equal(pluginInstance.myVariableDifferentStyleOfRequire, "value5");
        assert.equal(pluginInstance.myJsonRequire.loaded, "yes");
        // __filename/__dirname must point inside the plugins folder, not the host.
        assert.equal(
            pluginInstance.myGlobals.__filename,
            path.join(manager.options.pluginsPath, "my-test-plugin", "index.js"));
        assert.equal(pluginInstance.myGlobals.__dirname, path.join(manager.options.pluginsPath, "my-test-plugin"));
        // Timer and core globals must be the host's own functions, not copies.
        assert.equal(pluginInstance.myGlobals.clearImmediate, clearImmediate);
        assert.equal(pluginInstance.myGlobals.clearInterval, clearInterval);
        assert.equal(pluginInstance.myGlobals.clearTimeout, clearTimeout);
        assert.equal(pluginInstance.myGlobals.setImmediate, setImmediate);
        assert.equal(pluginInstance.myGlobals.setInterval, setInterval);
        assert.equal(pluginInstance.myGlobals.setTimeout, setTimeout);
        assert.equal(pluginInstance.myGlobals.Buffer, Buffer);
        assert.equal(pluginInstance.myGlobals.Function, Function);
        // NOTE: process and console are not the same but they should be available
        assert.isDefined(pluginInstance.myGlobals.process);
        assert.isDefined(pluginInstance.myGlobals.console);
    });
    it("require absolute files", async function() {
        const pluginSourcePath = path.join(__dirname, "my-plugin-with-abs-require");
        await manager.installFromPath(pluginSourcePath);
        const pluginInstance = manager.require("my-plugin-with-abs-require");
        assert.isDefined(pluginInstance, "Plugin is not loaded");
        assert.equal(pluginInstance.myVariableFromAbsoluteFile, "value3");
    });
    it("require a plugin sub folder", async function() {
        const pluginSourcePath = path.join(__dirname, "my-test-plugin");
        await manager.installFromPath(pluginSourcePath);
        const result = manager.require("my-test-plugin/subFolder");
        assert.isDefined(result, "value4");
    });
    it("require a plugin sub file", async function() {
        const pluginSourcePath = path.join(__dirname, "my-test-plugin");
        await manager.installFromPath(pluginSourcePath);
        const result = manager.require("my-test-plugin/subFolder/b");
        assert.isDefined(result, "value3");
    });
    it("index file can be required explicitly or implicitly", async function() {
        const pluginSourcePath = path.join(__dirname, "my-test-plugin");
        await manager.installFromPath(pluginSourcePath);
        const resultImplicit = manager.require("my-test-plugin");
        const resultExplicit = manager.require("my-test-plugin/index");
        const resultExplicit2 = manager.require("my-test-plugin/index.js");
        assert.equal(resultImplicit, resultExplicit);
        assert.equal(resultImplicit, resultExplicit2);
    });
    it("installing a plugin with folder as main", async function() {
        // package.json "main" pointing at a directory must resolve to its index.
        const pluginPath = path.join(__dirname, "my-plugin-with-folder-as-main");
        await manager.installFromPath(pluginPath);
        const pluginInstance = manager.require("my-plugin-with-folder-as-main");
        assert.isDefined(pluginInstance, "Plugin is not loaded");
        assert.equal(pluginInstance.myVariable, "value1");
    });
    it("installing a plugin with a circular reference in require", async function() {
        const pluginPath = path.join(__dirname, "my-plugin-with-circular-reference");
        await manager.installFromPath(pluginPath);
        const pluginInstance = manager.require("my-plugin-with-circular-reference");
        assert.isDefined(pluginInstance, "Plugin is not loaded");
        assert.equal(pluginInstance.myVariable, "value1");
    });
    it("file should win over folder with the same name", async function() {
        const pluginPath = path.join(__dirname, "my-plugin-file-win-over-folder");
        await manager.installFromPath(pluginPath);
        const pluginInstance = manager.require("my-plugin-file-win-over-folder");
        assert.isDefined(pluginInstance, "Plugin is not loaded");
        assert.equal(pluginInstance, "i-am-the-file");
    });
});
// Scoped package names (@scope/name) must resolve like regular plugin names.
describe("scoped plugins", function() {
    it("installing a scoped plugin", async function() {
        const pluginPath = path.join(__dirname, "my-basic-plugin-scoped");
        await manager.installFromPath(pluginPath);
        const pluginInstance = manager.require("@myscope/my-basic-plugin-scoped");
        assert.isDefined(pluginInstance, "Plugin is not loaded");
        assert.equal(pluginInstance.myVariable, "value1");
    });
    it("installing a scoped plugin with path", async function() {
        // Sub-path resolution must also work through the scope prefix.
        const pluginPath = path.join(__dirname, "my-basic-plugin-scoped");
        await manager.installFromPath(pluginPath);
        const pluginInstance = manager.require("@myscope/my-basic-plugin-scoped/index.js");
        assert.isDefined(pluginInstance, "Plugin is not loaded");
        assert.equal(pluginInstance.myVariable, "value1");
    });
});
describe("plugins dependencies", function() {
this.slow(6000);
// Transitive npm dependencies of an installed plugin are installed, resolved,
// and survive uninstall/reinstall cycles of a dependency.
// Fixed typo in a failure message ("Excepted to fail" -> "Expected to fail").
describe("Npm dependencies", function() {
    describe("Given a package with npm dependencies", function() {
        beforeEach(async function() {
            const pluginSourcePath = path.join(__dirname, "my-plugin-with-dep");
            await manager.installFromPath(pluginSourcePath);
        });
        it("dependencies are installed", async function() {
            assert.equal(manager.list().length, 2);
            assert.equal(manager.list()[0].name, "moment");
            assert.equal(manager.list()[1].name, "my-plugin-with-dep");
        });
        it("dependencies are available", async function() {
            const pluginInstance = manager.require("my-plugin-with-dep");
            assert.equal(pluginInstance.testDebug, require("debug")); // I expect to be exactly the same
            assert.equal(pluginInstance.testMoment, "1981/10/06");
        });
        it("by default @types dependencies are not installed", async function() {
            for (const p of manager.list()) {
                assert.notEqual(p.name, "@types/express");
            }
        });
        it("dependencies installed in the host are not installed but are available", async function() {
            // debug package is already available in the host
            for (const p of manager.list()) {
                assert.notEqual(p.name, "debug");
            }
        });
        describe("uninstalling a dependency (moment)", function() {
            beforeEach(async function() {
                await manager.uninstall("moment");
            });
            it("requiring the plugin will fail", function() {
                try {
                    manager.require("my-plugin-with-dep");
                } catch (e) {
                    return;
                }
                throw new Error("Expected to fail");
            });
            it("if dependency is reinstalled plugin will work again", async function() {
                await manager.installFromNpm("moment", "2.18.1");
                const pluginInstance = manager.require("my-plugin-with-dep");
                assert.equal(pluginInstance.testMoment, "1981/10/06");
            });
            it("after a plugin load error if dependency is reinstalled plugin will work again", async function() {
                // A failed load must not be cached: reinstalling the dependency heals the plugin.
                let initialFailed = false;
                try {
                    manager.require("my-plugin-with-dep");
                } catch (e) {
                    initialFailed = true;
                }
                assert.isTrue(initialFailed, "expected to fail to load without moment");
                await manager.installFromNpm("moment", "2.18.1");
                const pluginInstance = manager.require("my-plugin-with-dep");
                assert.equal(pluginInstance.testMoment, "1981/10/06");
            });
        });
    });
});
// Dependencies declared as GitHub references (owner/repo) are fetched from GitHub.
describe("Github dependencies", function() {
    it("dependencies are installed", async function() {
        const pluginSourcePath = path.join(__dirname, "my-plugin-with-git-dep");
        await manager.installFromPath(pluginSourcePath);
        assert.equal(manager.list().length, 2);
        assert.equal(manager.list()[0].name, "underscore");
        assert.equal(manager.list()[1].name, "my-plugin-with-git-dep");
    });
});
// options.ignoredDependencies (regex or exact name) skips installation of
// matching dependencies; requiring a plugin that needs them then fails.
describe("Given some ignored dependencies", function() {
    beforeEach(function() {
        manager.options.ignoredDependencies = [/^@types\//, "moment"];
    });
    it("ignored dependencies are not installed", async function() {
        const pluginSourcePath = path.join(__dirname, "my-plugin-with-dep");
        await manager.installFromPath(pluginSourcePath);
        for (const p of manager.list()) {
            assert.notEqual(p.name, "moment");
        }
    });
    it("if the ignored dependencies is required the plugin will not be loaded", async function() {
        const pluginSourcePath = path.join(__dirname, "my-plugin-with-dep");
        await manager.installFromPath(pluginSourcePath);
        // expected to fail because moment is missing...
        try {
            manager.require("my-plugin-with-dep");
        } catch (err) {
            assert.isTrue(err.message.includes("Cannot find module 'moment'"));
            return;
        }
        throw new Error("Expected to fail");
    });
});
// Version-update semantics: installing a newer version of a dependency must
// reload its dependents, and installing an older requirement must not downgrade.
describe("handling updates", function() {
    beforeEach(async function() {
        await manager.installFromPath(path.join(__dirname, "my-plugin-a@v1"));
        await manager.installFromPath(path.join(__dirname, "my-plugin-b")); // depend on my-plugin-a@1.0.0
    });
    it("updating a dependency will reload dependents", async function() {
        // load the plugin before installing the new version
        // to ensure that the starting condition is valid
        assert.equal(manager.list().length, 2);
        assert.equal(manager.list()[0].name, "my-plugin-a");
        assert.equal(manager.list()[0].version, "1.0.0");
        assert.equal(manager.list()[1].name, "my-plugin-b");
        const initialPluginInstance = manager.require("my-plugin-b");
        assert.equal(initialPluginInstance, "a = v1");
        await manager.installFromPath(path.join(__dirname, "my-plugin-a@v2"));
        assert.equal(manager.list().length, 2);
        assert.isDefined(manager.alreadyInstalled("my-plugin-b", "=1.0.0"));
        assert.isDefined(manager.alreadyInstalled("my-plugin-a", "=2.0.0"));
        // b must now see the updated a without being reinstalled itself.
        const pluginInstance = manager.require("my-plugin-b");
        assert.equal(pluginInstance, "a = v2");
    });
    it("updating a package that need a prev version will not downgrade the dependency", async function() {
        await manager.installFromPath(path.join(__dirname, "my-plugin-a@v2")); // update dependency to v2
        await manager.uninstall("my-plugin-b");
        await manager.installFromPath(path.join(__dirname, "my-plugin-b")); // depend on my-plugin-a@1.0.0
        assert.equal(manager.list().length, 2);
        assert.equal(manager.list()[0].name, "my-plugin-a");
        assert.equal(manager.list()[0].version, "2.0.0");
        assert.equal(manager.list()[1].name, "my-plugin-b");
        const initialPluginInstance = manager.require("my-plugin-b");
        assert.equal(initialPluginInstance, "a = v2");
    });
});
// options.staticDependencies lets the host inject a module implementation
// directly; matching dependencies are then resolved in-memory, not installed.
describe("given static dependencies", function() {
    beforeEach(function() {
        const momentStub = () => {
            return {
                format: () => "this is moment stub"
            };
        };
        manager.options.staticDependencies = {moment: momentStub};
    });
    it("static dependencies are not installed but resolved correctly", async function() {
        const pluginSourcePath = path.join(__dirname, "my-plugin-with-dep");
        await manager.installFromPath(pluginSourcePath);
        assert.equal(manager.list().length, 1);
        assert.equal(manager.list()[0].name, "my-plugin-with-dep");
        const pluginInstance = manager.require("my-plugin-with-dep");
        assert.equal(pluginInstance.testMoment, "this is moment stub");
    });
});
describe("Not compatible dependencies with host", function() {
    // Note: assumes the host has its own "debug" npm package at a newer major
    // version than the plugin's (the assertion below expects a 4.x host build;
    // the original comment said v3 and appears stale — confirm against package.json).
    it("dependencies are installed", async function() {
        // this package contains "debug" at version 2 (different from the host)
        const pluginSourcePath = path.join(__dirname, "my-plugin-with-diff-dep");
        await manager.installFromPath(pluginSourcePath);
        assert.equal(manager.list().length, 3);
        assert.equal(manager.list()[0].name, "ms"); // this is a dependency of debug
        assert.equal(manager.list()[1].name, "debug");
        assert.equal(manager.list()[2].name, "my-plugin-with-diff-dep");
    });
    it("dependencies are available", async function() {
        const pluginSourcePath = path.join(__dirname, "my-plugin-with-diff-dep");
        await manager.installFromPath(pluginSourcePath);
        const pluginInstance = manager.require("my-plugin-with-diff-dep");
        assert.notEqual(pluginInstance.testDebug, require("debug")); // expected to differ (plugin's v2 vs host's version)
    });
    it("dependencies is not the same", async function() {
        const pluginSourcePath = path.join(__dirname, "my-plugin-with-diff-dep");
        await manager.installFromPath(pluginSourcePath);
        const pluginDebugInstance = manager.require("debug/package.json");
        const hostDebugInstance = require("debug/package.json");
        assert.equal(pluginDebugInstance.version, "2.6.9");
        assert.equal(hostDebugInstance.version.substring(0, 1), "4");
        assert.notEqual(pluginDebugInstance.version, hostDebugInstance.version); // expected to differ (plugin's v2 vs host's v4)
    });
});
// Host-dependency precedence: a package already present in the host's
// node_modules is reused; an explicitly installed plugin version overrides it,
// and uninstalling that override falls back to the host copy again.
describe("given an host dependency", function() {
    const hostDependencyDestPath = path.join(__dirname, "..", "node_modules", "host-dependency");
    // given a dependency installed in the host
    // with version 1
    // note: I simulate an host dependency by manually copy it in the node_modules folder
    before(async function() {
        const hostDependencySourcePath = path.join(__dirname, "host-dependency@v1");
        await fs.copy(hostDependencySourcePath, hostDependencyDestPath);
    });
    after(async function() {
        await fs.remove(hostDependencyDestPath);
    });
    it("it can be resolved", function() {
        const dependency = require("host-dependency");
        assert.isDefined(dependency);
        assert.equal(dependency, "v1.0.0");
        const dependencyPackage = require("host-dependency/package.json");
        assert.equal(dependencyPackage.version, "1.0.0");
    });
    describe("when installing plugin that depends on the host dependency", function() {
        beforeEach(async function() {
            // this package depends on "host-dependency" at version ^1.0.0
            const pluginSourcePath = path.join(__dirname, "my-plugin-with-host-dep");
            await manager.installFromPath(pluginSourcePath);
        });
        it("dependency is not installed because already installed in host", function() {
            assert.equal(manager.list().length, 1);
            assert.equal(manager.list()[0].name, "my-plugin-with-host-dep");
        });
        it("it is resolved using the host dependency", function() {
            const pluginInstance = manager.require("my-plugin-with-host-dep");
            assert.isDefined(pluginInstance);
            assert.equal(pluginInstance.testHostDependency, require("host-dependency"));
            assert.equal(pluginInstance.testHostDependency, "v1.0.0");
        });
        describe("when installing an update of the host dependency", function() {
            beforeEach(async function() {
                const pluginSourcePath = path.join(__dirname, "host-dependency@v1.0.1");
                await manager.installFromPath(pluginSourcePath);
            });
            it("dependency is installed/updated", function() {
                assert.equal(manager.list().length, 2);
                assert.equal(manager.list()[0].name, "my-plugin-with-host-dep");
                assert.equal(manager.list()[1].name, "host-dependency");
                assert.equal(manager.list()[1].version, "1.0.1");
            });
            it("the updated dependency is now used by all dependants", function() {
                // The plugin-installed copy must shadow the host copy.
                const pluginInstance = manager.require("my-plugin-with-host-dep");
                assert.isDefined(pluginInstance);
                assert.notEqual(pluginInstance.testHostDependency, require("host-dependency"));
                assert.equal(pluginInstance.testHostDependency, "v1.0.1");
            });
            describe("when uninstalling the update", function() {
                beforeEach(async function() {
                    await manager.uninstall("host-dependency");
                });
                it("dependency is uninstalled", function() {
                    assert.equal(manager.list().length, 1);
                    assert.equal(manager.list()[0].name, "my-plugin-with-host-dep");
                });
                it("it is again resolved using the host dependency", function() {
                    const pluginInstance = manager.require("my-plugin-with-host-dep");
                    assert.isDefined(pluginInstance);
                    assert.equal(pluginInstance.testHostDependency, require("host-dependency"));
                    assert.equal(pluginInstance.testHostDependency, "v1.0.0");
                });
            });
        });
    });
});
});
// queryPackageFromNpm: registry metadata lookups with exact versions,
// semver ranges, empty/undefined/null versions, and scoped package names.
describe("query npm package", function() {
    it("get latest version info", async function() {
        const info = await manager.queryPackageFromNpm("lodash");
        assert.equal("lodash", info.name);
        assert.isDefined(info.version, "Version not defined");
    });
    it("get latest version info (with string empty version)", async function() {
        const info = await manager.queryPackageFromNpm("lodash", "");
        assert.equal("lodash", info.name);
        assert.isDefined(info.version, "Version not defined");
    });
    it("get latest version info (with undefined version)", async function() {
        const info = await manager.queryPackageFromNpm("lodash", undefined);
        assert.equal("lodash", info.name);
        assert.isDefined(info.version, "Version not defined");
    });
    it("get latest version info (with null version)", async function() {
        const info = await manager.queryPackageFromNpm("lodash", null as any);
        assert.equal("lodash", info.name);
        assert.isDefined(info.version, "Version not defined");
    });
    it("get specific version info", async function() {
        let info = await manager.queryPackageFromNpm("lodash", "4.17.4");
        assert.equal(info.name, "lodash");
        assert.equal(info.version, "4.17.4");
        info = await manager.queryPackageFromNpm("lodash", "=4.17.4");
        assert.equal(info.name, "lodash");
        assert.equal(info.version, "4.17.4");
    });
    it("get caret version range info", async function() {
        const info = await manager.queryPackageFromNpm("lodash", "^3.0.0");
        assert.equal(info.name, "lodash");
        assert.equal(info.version, "3.10.1"); // this test can fail if lodash publish a 3.x version
    });
    it("get latest version info for scoped packages", async function() {
        const info = await manager.queryPackageFromNpm("@types/node");
        assert.equal("@types/node", info.name);
        assert.isDefined(info.version);
    });
    it("get specific version info for scoped packages", async function() {
        let info = await manager.queryPackageFromNpm("@types/node", "7.0.13");
        assert.equal("@types/node", info.name);
        assert.equal(info.version, "7.0.13");
        info = await manager.queryPackageFromNpm("@types/node", "=7.0.13");
        assert.equal("@types/node", info.name);
        assert.equal(info.version, "7.0.13");
    });
    it("get caret version range info for scoped packages", async function() {
        const info = await manager.queryPackageFromNpm("@types/node", "^6.0.0");
        assert.equal(info.name, "@types/node");
        assert.isTrue(semver.gt(info.version, "6.0.0"), "Should get a version greater than 6.0.0");
        assert.isTrue(semver.lt(info.version, "7.0.0"), "Should get a version less than 7.0.0");
    });
});
// queryPackageFromGithub: metadata lookup from an owner/repo reference.
describe("query github package", function() {
    it("get version info", async function() {
        const info = await manager.queryPackageFromGithub("lodash/lodash");
        assert.equal("lodash", info.name);
        assert.isDefined(info.version);
    });
});
// queryPackage: dispatches to GitHub or npm based on the version-spec format.
describe("query package info", function() {
    it("get version from github", async function() {
        const info = await manager.queryPackage("lodash", "lodash/lodash");
        assert.equal("lodash", info.name);
        assert.isDefined(info.version);
    });
    it("get version from npm", async function() {
        const info = await manager.queryPackage("lodash", "4.17.4");
        assert.equal("lodash", info.name);
        assert.isDefined(info.version);
    });
});
// Installation locking: concurrent installs are serialized via a lock file;
// a held lock blocks installs until it becomes stale (lockStale ms).
describe("locking", function() {
    beforeEach(function() {
        // reduce lock timeout for test reason
        manager.options.lockWait = 50;
        manager.options.lockStale = 1000;
    });
    it("cannot install multiple package concurrently", async function() {
        // I expect this to take some time...
        const installation1 = manager.installFromNpm("moment");
        // so I expect a concurrent installation to fail...
        const pluginSourcePath = path.join(__dirname, "my-basic-plugin");
        const installation2 = manager.installFromPath(pluginSourcePath);
        try {
            await installation2;
        } catch (err) {
            // Always await installation1 so it cannot leak as an unhandled rejection.
            await installation1;
            return;
        }
        throw new Error("Expected to fail");
    });
    describe("given a lock", function() {
        beforeEach(async function() {
            await fs.ensureDir(manager.options.pluginsPath);
            // simulate a lock
            await (manager as any).syncLock();
            manager.options.lockStale = 1000;
        });
        afterEach(async function() {
            // simulate unlock
            await (manager as any).syncUnlock();
        });
        it("cannot install package", async function() {
            const pluginSourcePath = path.join(__dirname, "my-basic-plugin");
            const installation = manager.installFromPath(pluginSourcePath);
            try {
                await installation;
            } catch (err) {
                return;
            }
            throw new Error("Expected to fail");
        });
        it("sync is considered stale after some time", async function() {
            await sleep(manager.options.lockStale + 1);
            // expected to succeeded because lock is considered stale
            const pluginSourcePath = path.join(__dirname, "my-basic-plugin");
            await manager.installFromPath(pluginSourcePath);
        });
    });
});
describe("sandbox", function() {
// Sandbox globals: inherited from host by default, overridable via
// options.sandbox.global, and isolated so plugin writes never leak back.
// Fixed typo in a failure message ("Excepted to fail" -> "Expected to fail").
describe("given globals variables", function() {
    it("unknown globals throw an exception", function() {
        const code = `module.exports = someUnknownGlobalVar;`;
        try {
            manager.runScript(code);
        } catch {
            return;
        }
        throw new Error("Expected to fail");
    });
    it("globals are available", function() {
        const code = `module.exports = encodeURIComponent("test/1");`;
        const result = manager.runScript(code);
        assert.equal(result, encodeURIComponent("test/1"));
    });
    it("globals are inherited from host", function() {
        // Note: this is a bad practice (modify global...) but I support it
        (global as any).myCustomGlobalVar = "myCustomGlobalVar1";
        const code = `module.exports = myCustomGlobalVar`;
        const result = manager.runScript(code);
        assert.equal(result, "myCustomGlobalVar1");
    });
    it("globals can be overwritten from host", function() {
        (manager.options.sandbox.global as any) = {
            ...global, // copy default global
            myCustomGlobalVar: "myCustomGlobalVar2"
        };
        const code = `module.exports = myCustomGlobalVar`;
        const result = manager.runScript(code);
        assert.equal(result, "myCustomGlobalVar2");
    });
    it("overwritten globals not affect host, is isolated", function() {
        assert.isUndefined((global as any).SOME_OTHER_KEY, "Initially host should not have it");
        manager.options.sandbox.global = {...global, SOME_OTHER_KEY: "test1" } as any;
        const code = `module.exports = SOME_OTHER_KEY;`;
        const result = manager.runScript(code);
        assert.equal(result, "test1");
        assert.isUndefined((global as any).SOME_OTHER_KEY, "Host should not inherit it");
    });
});
// Sandbox env: plugins inherit process.env from the host unless
// options.sandbox.env overrides it; plugin writes never leak back to host.
describe("given an environment variables", function() {
    beforeEach(function() {
        process.env.SOME_RANDOM_KEY = "test1";
    });
    afterEach(function() {
        delete process.env.SOME_RANDOM_KEY;
    });
    it("plugins inherit from host", function() {
        const code = `module.exports = process.env.SOME_RANDOM_KEY;`;
        const result = manager.runScript(code);
        assert.equal(result, "test1");
    });
    it("allow to override env from host", function() {
        // Overriding replaces the whole env: the inherited key disappears.
        manager.options.sandbox.env = { SOME_KEY: "test2" };
        const code = `module.exports = process.env.SOME_RANDOM_KEY;`;
        const result = manager.runScript(code);
        assert.isUndefined(result);
        const code2 = `module.exports = process.env.SOME_KEY;`;
        const result2 = manager.runScript(code2);
        assert.equal(result2, "test2");
    });
    it("overwritten env not affect host, is isolated", function() {
        assert.isUndefined(process.env.SOME_PLUGIN_KEY, "Initially host should not have it");
        manager.options.sandbox.env = { SOME_PLUGIN_KEY: "test2" };
        const code = `module.exports = process.env.SOME_PLUGIN_KEY;`;
        const result = manager.runScript(code);
        assert.equal(result, "test2");
        assert.isUndefined(process.env.SOME_PLUGIN_KEY, "Host should not inherit it");
    });
});
// Per-plugin sandbox templates and isolation guarantees.
describe("sandbox specific for plugin", function() {
    it("set sandbox for a specific plugin", async function() {
        const code = `module.exports = process.env.SOME_RANDOM_KEY;`;
        await manager.installFromCode("my-plugin-with-sandbox", code);
        manager.setSandboxTemplate("my-plugin-with-sandbox", {
            env: {
                SOME_RANDOM_KEY: "test1"
            }
        });
        const result = manager.require("my-plugin-with-sandbox");
        assert.equal(result, "test1");
    });
    it("A plugin share the same globals between modules", async function() {
        // All modules of one plugin must see a single shared global object.
        const pluginSourcePath = path.join(__dirname, "my-plugin-env-global");
        await manager.installFromPath(pluginSourcePath);
        const result = manager.require("my-plugin-env-global");
        assert.equal(result, "Hello world!");
    });
    it("plugins not share global and env with host, is isolated", function() {
        assert.isUndefined(process.env.SOME_PLUGIN_KEY, "Initially host should not have it");
        assert.isUndefined((global as any).SOME_OTHER_KEY, "Initially host should not have it");
        const code = `
global.SOME_OTHER_KEY = "test1";
process.env.SOME_PLUGIN_KEY = "test2";
module.exports = SOME_OTHER_KEY + process.env.SOME_PLUGIN_KEY;`;
        const result = manager.runScript(code);
        assert.equal(result, "test1test2");
        assert.isUndefined(process.env.SOME_PLUGIN_KEY, "Host should not inherit it");
        assert.isUndefined((global as any).SOME_OTHER_KEY, "Host should not have it");
    });
});
// The require object injected into plugins must behave like Node's,
// including require.resolve, for system modules.
describe("NodeRequire object inside a plugin", function() {
    it("require system module", async function() {
        const code = `module.exports = require("fs");`;
        await manager.installFromCode("my-plugin-with-sandbox", code);
        const result = manager.require("my-plugin-with-sandbox");
        assert.equal(result, require("fs"));
    });
    it("require.resolve system module", async function() {
        const code = `module.exports = require.resolve("fs");`;
        await manager.installFromCode("my-plugin-with-sandbox", code);
        const result = manager.require("my-plugin-with-sandbox");
        assert.equal(result, require.resolve("fs"));
    });
});
});
});
// Resolve GitHub credentials for tests that hit the GitHub API.
// Prefers a local (uncommitted) github_auth.json file; falls back to the
// github_auth_username/github_auth_token environment variables; returns
// undefined when neither source is available.
function getGithubAuth() {
    try {
        return require("./github_auth.json");
    } catch (e) {
        // File not present: build basic-auth info from the environment, if set.
        if (!process.env.github_auth_username) {
            return undefined;
        }
        return {
            type: "basic",
            username: process.env.github_auth_username,
            password: process.env.github_auth_token
        };
    }
}
// Promise-based delay helper: resolves after ms milliseconds.
function sleep(ms: number) {
    return new Promise((done) => {
        setTimeout(done, ms);
    });
}
process.on("unhandledRejection", (reason: any, p) => {
console.log("Unhandled Rejection at: Promise", p, "reason:", (reason && reason.stack));
}); | the_stack |
module FlipperUtils {
"use strict";
// Time to wait if using custom animation that is instant.
export var NAVIGATION_QUICK_TIMEOUT = 500;
// Time to wait before any navigation should have completed (animations take 200ms by default).
// NOTE(review): Math.min caps this at NAVIGATION_QUICK_TIMEOUT (500ms), which contradicts
// the adjusted 3000ms bound the expression computes — Math.max may have been intended;
// confirm against callers before changing.
export var NAVIGATION_TIMEOUT = Math.min(NAVIGATION_QUICK_TIMEOUT, WinJS.UI._animationTimeAdjustment(3000));
// DOM id of the container div hosting the FlipView under test.
var mainFlipperDivId = "MainFlipperDiv";
var utils = Helper;
// Private FlipView typing gives the tests access to internal members.
var FlipView = <typeof WinJS.UI.PrivateFlipView>WinJS.UI.FlipView;
// Used for TestDataSource
// Directives make the test data source answer synchronously with no extra items.
var controller = {
    directivesForMethod: function (method, args) {
        return {
            callMethodSynchronously: true,
            countBeforeDelta: 0,
            countAfterDelta: 0
        };
    }
};
// Flipper Events
var pageVisibilityEvent = "pagevisibilitychanged";
var datasourceChangedEvent = "datasourcecountchanged";
var pageSelectedEvent = "pagecompleted";
// This is declaration for checkComplete event handler used in navigation methods ensureNext, ensurePrevious and ensureCurrentPage.
var checkComplete = {};
// This will verify that the flipper actually flipped to the appropriate page for ensureNext and ensurePrevious.
function VerifyLocationAfterFlip(flipper, expectedPage, callback) {
flipper.removeEventListener(pageSelectedEvent, checkComplete, false);
LiveUnit.LoggingCore.logComment("Flipper currentPage index after navigation: " + flipper.currentPage);
LiveUnit.Assert.areEqual(expectedPage, flipper.currentPage, "Flipper currentPage should be at index " + expectedPage);
callback();
}
// This will verify that the flipper actually flipped to the appropriate page for ensureCurrentPage.
function VerifyLocationAfterJump(flipper, expectedPage, callback) {
flipper.removeEventListener(pageSelectedEvent, checkComplete, false);
LiveUnit.LoggingCore.logComment("Flipper currentPage index after navigation: " + flipper.currentPage);
LiveUnit.Assert.areEqual(expectedPage, flipper.currentPage, "Flipper currentPage should be at index " + expectedPage);
callback();
}
export function flipperData() {
var data = [{ id: "page1", width: "99px", height: "100px", bgcolor: "#FF0000", content: "Page 1" },
{ id: "page2", width: "101px", height: "200px", bgcolor: "#00FF00", content: "Page 2" },
{ id: "page3", width: "200px", height: "101px", bgcolor: "#0000FF", content: "Page 3" },
{ id: "page4", width: "300px", height: "300px", bgcolor: "#FFFF00", content: "Page 4" },
{ id: "page5", width: "200px", height: "300px", bgcolor: "#FF00FF", content: "Page 5" },
{ id: "page6", width: "300px", height: "200px", bgcolor: "#00FFFF", content: "Page 6" },
{ id: "page7", width: "300px", height: "99px", bgcolor: "#AAFFAA", content: "Page 7" }
];
return data;
}
export function flipperDataTemplate(itemPromise) {
return itemPromise.then(function (item) {
var div = document.createElement("div");
div.setAttribute("id", item.data.id);
div.style.width = item.data.width;
div.style.height = item.data.height;
div.style.backgroundColor = item.data.bgColor;
div.textContent = item.data.content;
return div;
});
}
    export function create2DFlipper(childInsertPage, parentOptions?, childOptions?) {
        /// <summary>
        /// Create a parent flipper (10 pages) whose page at index childInsertPage
        /// hosts a nested child flipper (5 pages) — a "2D" flipper. Returns the
        /// parent FlipView after asserting both flippers instantiated correctly.
        /// </summary>
        /// <param name="childInsertPage" type="integer">
        /// Index of the parent page that receives the nested child flipper.
        /// </param>
        /// <param name="parentOptions" optional="true" type="object">
        /// FlipView options for the parent; itemDataSource/itemTemplate are overwritten below.
        /// </param>
        /// <param name="childOptions" optional="true" type="object">
        /// FlipView options for the child; itemDataSource/itemTemplate are overwritten below.
        /// </param>
        // Parent data: ordinary title/content pages everywhere except a marker
        // record (childFlipper: 1) at childInsertPage.
        function parentDataSource(numItems) {
            var testData = [];
            for (var i = 0; i < numItems; ++i) {
                if (i !== childInsertPage) {
                    testData.push({ title: "parentTitle" + i, content: "parentContent" });
                } else {
                    // The child flipper will be inserted at this page
                    testData.push({ title: "childFlipper", childFlipper: 1 });
                }
            }
            return new WinJS.Binding.List(testData).dataSource;
        }
        // Plain sequential title/content data for the nested child flipper.
        function childDataSource(numItems) {
            var testData = [];
            for (var i = 0; i < numItems; ++i) {
                testData.push({ title: "childTitle" + i, content: "childContent" });
            }
            return new WinJS.Binding.List(testData).dataSource;
        }
        // Renders a parent page; on the marker record it instantiates the child
        // FlipView inside the page element instead of plain content.
        function parentItemTemplate(itemPromise) {
            return itemPromise.then(function (item) {
                var result = document.createElement("div");
                result.setAttribute("id", item.data.title);
                if (item.data.childFlipper) {
                    if (!childOptions) {
                        childOptions = {};
                    }
                    childOptions.itemDataSource = childFlipperDataSource;
                    childOptions.itemTemplate = childItemTemplate;
                    var childFlipper = new FlipView(result, childOptions);
                    LiveUnit.Assert.isNotNull(childFlipper, "Child flipper element should not be null when instantiated.");
                    LiveUnit.Assert.isTrue(typeof childFlipper.next === "function", "Child flipper doesn't appear to be a valid flipper.");
                } else {
                    result.innerHTML =
                    "<div>" + item.data.title + "</div>" +
                    "<div>" + item.data.content + "</div>";
                }
                return result;
            });
        }
        // Renders a simple title/content page for the child flipper.
        function childItemTemplate(itemPromise) {
            return itemPromise.then(function (item) {
                var result = document.createElement("div");
                result.setAttribute("id", item.data.title);
                result.innerHTML =
                "<div>" + item.data.title + "</div>" +
                "<div>" + item.data.content + "</div>";
                return result;
            });
        }
        var parentItemCount = 10,
            childItemCount = parentItemCount / 2,
            parentFlipperDataSource = parentDataSource(parentItemCount),
            childFlipperDataSource = childDataSource(childItemCount);
        if (!parentOptions) {
            parentOptions = {};
        }
        parentOptions.itemDataSource = parentFlipperDataSource;
        parentOptions.itemTemplate = parentItemTemplate;
        var parentFlipper = FlipperUtils.instantiate(FlipperUtils.basicFlipperID(), parentOptions);
        LiveUnit.Assert.isNotNull(parentFlipper, "Parent flipper element should not be null when instantiated.");
        LiveUnit.Assert.isTrue(typeof parentFlipper.next === "function", "Parent flipper Doesn't appear to be a valid flipper.");
        return parentFlipper;
    }
export function addFlipperDom(size = "200") {
/// <summary>
/// Add a main flipper DOM from HTML
/// </summary>
LiveUnit.LoggingCore.logComment("Add Flipper div \"" + mainFlipperDivId + "\" to the DOM");
var flipperNode = document.createElement("div");
flipperNode.setAttribute("id", mainFlipperDivId);
var htmlString = '<div id="flipper" style="width: ' + size + 'px; height: ' + size + 'px; overflow:hidden">';
flipperNode.innerHTML = htmlString;
document.body.appendChild(flipperNode);
}
export function basicFlipperHtmlIDs() {
/// <summary>
/// Return div ID's for the divs in flipper page
/// </summary>
/// <returns type="array_object"/>
return ['page1', 'page2', 'page3', 'page4', 'page5', 'page6', 'page7'];
}
export function basicFlipperID() {
/// <summary>
/// Return flipper ID for the BasicFlipper.html file
/// </summary>
/// <returns type="string"/>
return 'flipper';
}
export function ensureCurrentPage(flipper, index, callback) {
/// <summary>
/// The FlipView's curentPage setter property is asynchronous. This ensures that it is completed.
/// </summary>
/// <param name="flipper" type="object">
/// The FlipView object.
/// </param>
/// <param name="index" type="integer">
/// The index to jump to.
/// </param>
/// <param name="callback" type="function">
/// The test callback function for when navigation attempt is completed.
/// </param>
var expectedPage = (index < 0) ? 0 : index;
flipper.count().then(function (count) {
expectedPage = (index >= count) ? (count - 1) : index;
});
checkComplete = function (info) {
VerifyLocationAfterJump(flipper, expectedPage, callback);
};
flipper.addEventListener(pageSelectedEvent, checkComplete, false);
var lastPage = flipper.currentPage;
LiveUnit.LoggingCore.logComment("Current page index is: " + lastPage);
LiveUnit.LoggingCore.logComment("Attempt to set page index to: " + index);
flipper.currentPage = index;
}
export function ensureNext(flipper, callback, expectedPage?) {
/// <summary>
/// The FlipView's next function is asynchronous. This ensures that it is completed.
/// </summary>
/// <param name="flipper" type="object">
/// The FlipView object.
/// </param>
/// <param name="callback" type="function">
/// The test callback function for when navigation attempt is completed.
/// </param>
/// <returns type="boolean">
/// Returns the value of the FlipView next method.
/// </returns>
if (typeof (expectedPage) !== 'number') {
expectedPage = flipper.currentPage + 1;
}
checkComplete = function (info) {
VerifyLocationAfterFlip(flipper, expectedPage, callback);
};
LiveUnit.LoggingCore.logComment("Flipper currentPage index before flip to next: " + flipper.currentPage);
flipper.addEventListener(pageSelectedEvent, checkComplete, false);
var flipSuccess = flipper.next();
LiveUnit.LoggingCore.logComment("Flipper next method returned: " + flipSuccess);
if (!flipSuccess) {
LiveUnit.LoggingCore.logComment("Waiting " + NAVIGATION_TIMEOUT + "ms to ensure event doesn't fire.");
setTimeout(function () {
flipper.removeEventListener(pageSelectedEvent, checkComplete, false);
callback();
}, NAVIGATION_TIMEOUT);
}
return flipSuccess;
}
export function ensurePrevious(flipper, callback, expectedPage?) {
/// <summary>
/// The FlipView's previous function is asynchronous. This ensures that it is completed.
/// </summary>
/// <param name="flipper" type="object">
/// The FlipView object.
/// </param>
/// <param name="callback" type="function">
/// The test callback function for when navigation attempt is completed.
/// </param>
/// <returns type="boolean">
/// Returns the value of the FlipView previous method.
/// </returns>
if (typeof (expectedPage) !== 'number') {
expectedPage = flipper.currentPage - 1;
}
checkComplete = function (info) {
VerifyLocationAfterFlip(flipper, expectedPage, callback);
};
flipper.addEventListener(pageSelectedEvent, checkComplete, false);
LiveUnit.LoggingCore.logComment("Flipper currentPage index before flip to previous: " + flipper.currentPage);
var flipSuccess = flipper.previous();
LiveUnit.LoggingCore.logComment("Flip to Previous page returned: " + flipSuccess);
if (!flipSuccess) {
LiveUnit.LoggingCore.logComment("Waiting " + NAVIGATION_TIMEOUT + "ms to ensure event doesn't fire.");
setTimeout(function () {
flipper.removeEventListener(pageVisibilityEvent, checkComplete, false);
callback();
}, NAVIGATION_TIMEOUT);
}
return flipSuccess;
}
    export function instantiate(elementID, options?) {
        /// <summary>
        /// Instantiates a FlipView on the given element, installs instant custom
        /// animations to speed tests up, and verifies the control's initial
        /// orientation, itemSpacing, currentPage and DOM accessibility.
        /// </summary>
        /// <param name="elementID" type="string">
        /// The element ID to instantiate the flipper onto.
        /// </param>
        /// <param name="options" optional="true" type="object">
        /// Flipper object options used to instantiate the flipper. A default
        /// itemDataSource/itemTemplate (flipperData/flipperDataTemplate) is
        /// supplied when none is given.
        /// </param>
        /// <returns type="flipper_object"/>
        // NOTE(review): paramObject (and start/end below) appear unused — presumably
        // leftovers from earlier timing code; left in place to avoid behavior drift.
        var paramObject = {};
        // Set defaults for verify variables
        var currentPageVerify = 0;
        var orientationVerify = "horizontal";
        var itemSpacingVerify = 0;
        var itemDataSource = new WinJS.Binding.List(FlipperUtils.flipperData()).dataSource;
        var itemTemplate = FlipperUtils.flipperDataTemplate;
        var defaultData = true;
        // Get all the passed options, updating the verify variables to match.
        if (options) {
            if (options.currentPage) {
                // Only a currentPage inside the default data's range is honored.
                var flipperCount = FlipperUtils.flipperData().length;
                currentPageVerify = ((options.currentPage < flipperCount) ? options.currentPage : 0);
            }
            if (options.orientation) {
                orientationVerify = options.orientation;
            }
            if (options.itemSpacing) {
                itemSpacingVerify = options.itemSpacing;
            }
            if (options.itemDataSource) {
                itemDataSource = options.itemDataSource;
                defaultData = false;
            }
            if (options.itemTemplate) {
                itemTemplate = options.itemTemplate;
                defaultData = false;
            }
        }
        else {
            options = {};
        }
        // Add the datasource and template to the options
        // This ensures a datasource and template even if options === undefined
        options.itemDataSource = itemDataSource;
        options.itemTemplate = itemTemplate;
        LiveUnit.LoggingCore.logComment("Flipper will be instantiated with the following options:");
        if (options.currentPage) {
            LiveUnit.LoggingCore.logComment(" currentPage: " + options.currentPage);
        }
        else {
            LiveUnit.LoggingCore.logComment(" currentPage: " + currentPageVerify);
        }
        if (options.orientation) {
            LiveUnit.LoggingCore.logComment(" orientation: " + options.orientation);
        }
        else {
            LiveUnit.LoggingCore.logComment(" orientation: " + orientationVerify);
        }
        if (options.itemSpacing) {
            LiveUnit.LoggingCore.logComment(" itemSpacing: " + options.itemSpacing);
        }
        else {
            LiveUnit.LoggingCore.logComment(" itemSpacing: " + itemSpacingVerify);
        }
        if (defaultData) {
            LiveUnit.LoggingCore.logComment("Using the default itemDataSource and itemTemplate");
        }
        else {
            LiveUnit.LoggingCore.logComment("NOT using the default itemDataSource and itemTemplate");
        }
        LiveUnit.LoggingCore.logComment("Getting the flipper element by ID:" + elementID);
        var flipperElement = document.getElementById(elementID);
        LiveUnit.Assert.isTrue(flipperElement !== null, "Unable to find " + elementID + " in the DOM");
        LiveUnit.LoggingCore.logComment("Instantiate the flipper.");
        var start = new Date(),
            end;
        var flipper = new FlipView(flipperElement, options);
        // Replace the default 200ms animations with instantly-resolving promises
        // so navigation in tests completes as fast as possible.
        var fastCustomAnimations = {
            next: function () {
                return WinJS.Promise.wrap();
            },
            previous: function () {
                return WinJS.Promise.wrap();
            },
            jump: function () {
                return WinJS.Promise.wrap();
            },
        };
        flipper.setCustomAnimations(fastCustomAnimations);
        if (flipper) {
            LiveUnit.LoggingCore.logComment("Flipper has been instantiated.");
            LiveUnit.LoggingCore.logComment("Flipper orientation is: " + flipper.orientation);
            LiveUnit.Assert.isTrue(flipper.orientation === orientationVerify, "Flipper orientation is not " + orientationVerify);
            LiveUnit.LoggingCore.logComment("Flipper itemSpacing is: " + flipper.itemSpacing);
            LiveUnit.Assert.isTrue(flipper.itemSpacing === itemSpacingVerify, "Flipper itemSpacing is not " + itemSpacingVerify);
            LiveUnit.LoggingCore.logComment("Flipper currentPage is: " + flipper.currentPage);
            LiveUnit.Assert.isTrue(flipper.currentPage === currentPageVerify, "Flipper currentPage is not " + currentPageVerify);
            FlipperUtils.verifyFlipperDomAccessibility(elementID);
        }
        else {
            LiveUnit.LoggingCore.logComment("Unable to instantiate Flipper.");
        }
        return flipper;
    }
    export function isFlipperItemVisible(elementID) {
        /// <summary>
        /// Check if current flipper element is visible via DOM validation. This function depends
        /// upon the implementation details of flipper.
        /// </summary>
        /// <param name="elementID" type="string">
        /// The element ID of the flipper item to check.
        /// </param>
        /// <returns type="boolean"/>
        var flipperElement = document.getElementById(elementID);
        if (flipperElement !== null) {
            var flipper = document.getElementById(FlipperUtils.basicFlipperID()).winControl;
            // Three levels up from the rendered item is the page container whose
            // offsets are measured against the panning div's scroll window.
            var parents = <HTMLElement>flipperElement.parentNode.parentNode.parentNode;
            var offsetLeft = parents.offsetLeft;
            var offsetRight = offsetLeft + parents.offsetWidth;
            // _panningDivContainer is a private property and can change depending upon the implementation.
            // If failing, check with the dev if this is still the right way to obtain the panning div
            // Right now (beta milestone), this info can be found in FlipperHelpers.js file in the dev
            // unittests
            var panningDivScrollLeft = WinJS.Utilities.getScrollPosition(flipper._panningDivContainer).scrollLeft;
            var panningDivScrollRight = panningDivScrollLeft + flipper._panningDivContainer.offsetWidth;
            LiveUnit.LoggingCore.logComment("offsetLeft: " + offsetLeft);
            LiveUnit.LoggingCore.logComment("offsetRight: " + offsetRight);
            LiveUnit.LoggingCore.logComment("panningDivScrollLeft: " + panningDivScrollLeft);
            LiveUnit.LoggingCore.logComment("panningDivScrollRight: " + panningDivScrollRight);
            LiveUnit.LoggingCore.logComment("Check: (" + offsetRight + " > " + panningDivScrollLeft + ") && (" + offsetLeft + " < " + panningDivScrollRight + ")");
            // Visible when the item's horizontal span overlaps the scrolled viewport.
            // NOTE(review): only horizontal offsets are compared — presumably vertical
            // flippers are validated elsewhere; confirm before using this for them.
            if ((offsetRight > panningDivScrollLeft) && (offsetLeft < panningDivScrollRight)) {
                LiveUnit.LoggingCore.logComment(elementID + ": In view.");
                return true;
            }
            else {
                LiveUnit.LoggingCore.logComment(elementID + ": In DOM but not in view.");
                return false;
            }
        }
        LiveUnit.LoggingCore.logComment(elementID + ": Not in DOM or in view.");
        return false;
    }
export function removeFlipperDom() {
/// <summary>
/// Remove the flipper HTML from the DOM
/// </summary>
var flipperElement = document.getElementById(mainFlipperDivId);
LiveUnit.LoggingCore.logComment("Remove Flipper div \"" + mainFlipperDivId + "\" from the DOM");
flipperElement.parentNode.removeChild(flipperElement);
}
export function verifyFlipperDomAccessibility(elementID) {
/// <summary>
/// Verify that the Flipper elements in the DOM have appropriate accessibility attributes.
/// This inclues ARIA, role and tabindex properties.
/// <param name="elementID" type="string">
/// The element ID of the flipper item to check.
/// </param>
/// </summary>
var flipperElement = document.getElementById(elementID);
LiveUnit.LoggingCore.logComment("Verifying ARIA and Accessibility properties on flipper object...");
var flipper = flipperElement.winControl;
if (flipper.orientation === "horizontal") {
var prevInfo = <HTMLElement>flipperElement.getElementsByClassName("win-navleft")[0];
var nextInfo = <HTMLElement>flipperElement.getElementsByClassName("win-navright")[0];
}
else {
var prevInfo = <HTMLElement>flipperElement.getElementsByClassName("win-navtop")[0];
var nextInfo = <HTMLElement>flipperElement.getElementsByClassName("win-navbottom")[0];
}
var accessibleFlipper = {
label: flipperElement.getAttribute("aria-label"),
role: flipperElement.getAttribute("role"),
tabindex: flipperElement.getAttribute("tabindex"),
prevlabel: prevInfo.getAttribute("aria-label"),
nextlabel: nextInfo.getAttribute("aria-label"),
prevhidden: prevInfo.getAttribute("aria-hidden"),
nexthidden: nextInfo.getAttribute("aria-hidden")
};
// Defined ARIA labels per specification
var horizontalFlipViewLabel = "",
verticalFlipViewLabel = "",
previousButtonLabel = "Previous",
nextButtonLabel = "Next",
roleLabel = "listbox";
LiveUnit.LoggingCore.logComment("Flipper: ARIA label: " + accessibleFlipper.label)
if (flipper.orientation === "horizontal") {
LiveUnit.Assert.areEqual(horizontalFlipViewLabel, accessibleFlipper.label, "Flipper: ARIA label is not correct.");
}
else {
LiveUnit.Assert.areEqual(verticalFlipViewLabel, accessibleFlipper.label, "Flipper: ARIA label is not correct.");
}
LiveUnit.LoggingCore.logComment("Flipper: role: " + accessibleFlipper.role)
LiveUnit.Assert.areEqual(roleLabel, accessibleFlipper.role, "Flipper: role is not correct.");
LiveUnit.LoggingCore.logComment("Flipper: tabindex: " + accessibleFlipper.tabindex)
// Ensure that tabindex starts at 0 unless it's parent is -1
LiveUnit.Assert.areEqual("-1", accessibleFlipper.tabindex, "Flipper: tabindex is not correct.");
/*
if(flipperElement.parentNode.getAttribute("tabindex") === "-1") {
LiveUnit.Assert.areEqual("-1", accessibleFlipper.tabindex, "Flipper: tabindex is not correct.");
}
else {
LiveUnit.Assert.areEqual("0", accessibleFlipper.tabindex, "Flipper: tabindex is not correct.");
}
*/
LiveUnit.LoggingCore.logComment("Previous Button: ARIA label: " + accessibleFlipper.prevlabel)
LiveUnit.Assert.areEqual(previousButtonLabel, accessibleFlipper.prevlabel, "Previous Button: ARIA label is not correct.");
LiveUnit.LoggingCore.logComment("Next Button: ARIA label: " + accessibleFlipper.nextlabel)
LiveUnit.Assert.areEqual(nextButtonLabel, accessibleFlipper.nextlabel, "Next Button: ARIA label is not correct.");
LiveUnit.LoggingCore.logComment("Previous Button: hidden: " + accessibleFlipper.prevhidden)
LiveUnit.Assert.isNotNull(accessibleFlipper.prevhidden, "Next Button: hidden is not correct.");
LiveUnit.LoggingCore.logComment("Next Button: hidden: " + accessibleFlipper.nexthidden)
LiveUnit.Assert.isNotNull(accessibleFlipper.nexthidden, "Next Button: hidden is not correct.");
}
export function simpleArrayRenderer(itemPromise) {
/// <summary>
/// Custom Item Renderer for the CommonUtil simpleArraryDataSource.
/// </summary>
return itemPromise.then(function (item) {
var result = document.createElement("div");
// The title is unique for each dataObject and is being used as the ID
// so it can be easily looked up later for validation.
result.setAttribute("id", item.data.title);
result.innerHTML =
"<div>" + item.data.title + "</div>" +
"<div>" + item.data.content + "</div>";
return result;
});
}
export function simpleArrayData(totalItems): { title: string; content: string; }[] {
/// <summary>
/// Simple array data object used for manipulation tests against a TestDataSource.
/// <param name="totalItems" type="integer">
/// The number of array elements to create.
/// </param>
/// </summary>
var data = [];
for (var i = 0; i < totalItems; ++i) {
data.push({ title: "Title" + i, content: "Content" + i });
}
return data;
}
    export function getAllItemsFromDataSource(itemDataSource) {
        /// <summary>
        /// Will return a list of all items from the datasource.
        /// <param name="itemDataSource" type="DataSourceObject">
        /// An object to a datasource.
        /// </param>
        /// <returns>
        /// A promise for an array of all items in the datasource.
        /// </returns>
        /// </summary>
        /*
        var items = [],
            listBinding = itemDataSource.createListBinding(),
            remaining = true;
        for (var itemPromise = listBinding.first(); remaining; itemPromise = listBinding.next()) {
            items.push(itemPromise);
            itemPromise.then(function (item) {
                if (item) {
                    items.push(item);
                } else {
                    remaining = false;
                }
            });
        }
        listBinding.release();
        return items;
        */
        return itemDataSource.getCount().then(function (count) {
            var listBinding = itemDataSource.createListBinding(),
                promises = [];
            // Walk the binding from the first item, collecting a promise for
            // each of the `count` items.
            listBinding.jumpToItem(listBinding.first()).then(function () {
                for (var i = 0; i < count; i++) {
                    promises.push(listBinding.current());
                    listBinding.next();
                }
            });
            // NOTE(review): release() runs before the jumpToItem continuation is
            // guaranteed to have completed — this presumably works only because
            // the test data sources here resolve synchronously; confirm before
            // reusing this helper with an asynchronous data source.
            listBinding.release();
            return WinJS.Promise.join(promises);
        });
    }
    export function insertItem(insertAction, onSuccess, onError, setEventHandlers?) {
        /// <summary>
        /// Attempt to insert an item based on the insertAction passed in.
        /// Then verify the results of the insert and ensure the insert really happened.
        /// <param name="insertAction" type="string">
        /// Valid options are "InsertAtStart", "InsertAtEnd", "InsertBefore", "InsertAfter"
        /// </param>
        /// <param name="onSuccess" type="callback_function">
        /// A function to call when the change has completed successfully.
        /// </param>
        /// <param name="onError" type="callback_function">
        /// A function to call when an error occurs.
        /// </param>
        /// <param name="setEventHandlers" optional="true" type="function">
        /// A function passed in that will set up event handlers.
        /// </param>
        /// </summary>
        var totalItems = 10,
            itemDataSourceObject = new WinJS.Binding.List(FlipperUtils.simpleArrayData(totalItems)).dataSource,
            errors = [],
            verifyPosition,
            flipper,
            insertData = { title: insertAction, content: insertAction },
            flipperDiv = document.getElementById(FlipperUtils.basicFlipperID());
        // Setup event handlers if passed in.
        if (setEventHandlers) {
            setEventHandlers(FlipperUtils.basicFlipperID());
        }
        // Fires once the flipper has rendered its first page: performs the
        // requested insert and arranges for Complete() to run when the
        // datasource's count-changed event fires.
        var action = LiveUnit.GetWrappedCallback(function () {
            if (!flipper) {
                LiveUnit.Assert.fail("Flipper object is invalid");
            }
            flipper.removeEventListener(pageSelectedEvent, action);
            // Retrieve all items from datasource for logging purposes.
            LiveUnit.LoggingCore.logComment("DataSource Object Before " + insertAction);
            FlipperUtils.getAllItemsFromDataSource(itemDataSourceObject).then(function (items) {
                for (var i = 0; i < items.length; i++) {
                    LiveUnit.LoggingCore.logComment("key: " + items[i].key + ", title: " + items[i].data.title);
                }
            });
            // Setup verify variables to check against after inserting items around.
            LiveUnit.LoggingCore.logComment(insertAction + ": Attempting to insert item...");
            var verifydschanged = LiveUnit.GetWrappedCallback(function (ev) {
                Complete();
            });
            flipper.addEventListener(datasourceChangedEvent, verifydschanged);
            // verifyPosition records where the inserted item is expected to land.
            switch (insertAction) {
                case "InsertAtStart":
                    verifyPosition = 0;
                    itemDataSourceObject.insertAtStart(null, insertData).
                    then(null, Error);
                    break;
                case "InsertAtEnd":
                    verifyPosition = totalItems;
                    itemDataSourceObject.insertAtEnd(null, insertData).
                    then(null, Error);
                    break;
                case "InsertBefore":
                    verifyPosition = Math.floor(totalItems / 2);
                    itemDataSourceObject.itemFromIndex(verifyPosition).then(function (item) {
                        itemDataSourceObject.insertBefore(null, insertData, item.key).
                        then(null, Error);
                    }, Error);
                    break;
                case "InsertAfter":
                    verifyPosition = Math.floor(totalItems / 2) + 1;
                    itemDataSourceObject.itemFromIndex(verifyPosition - 1).then(function (item) {
                        itemDataSourceObject.insertAfter(null, insertData, item.key).
                        then(null, Error);
                    }, Error);
                    break;
                default:
                    LiveUnit.Assert.fail(insertAction + ": Unrecognized insert action.");
            }
        });
        flipperDiv.addEventListener(pageSelectedEvent, action, false);
        flipper = FlipperUtils.instantiate(FlipperUtils.basicFlipperID(), { itemDataSource: itemDataSourceObject, itemTemplate: FlipperUtils.simpleArrayRenderer });
        LiveUnit.Assert.isNotNull(flipper, "Flipper element should not be null when instantiated.");
        // This must run after all promises have completed.
        var timeout = setTimeout(function () {
            if (errors[0]) {
                LiveUnit.LoggingCore.logComment("Errors Detected.");
                onError(errors);
            }
            else {
                LiveUnit.LoggingCore.logComment("Edit successful.");
                onSuccess();
            }
            // Setting the timeout to 2 x NAVIGATION_TIMEOUT as datasourcecountchanged takes a long time to fire.
            // If the timeout is less, the test moves on and then event listener is executed causing the subsequent tests to fail
        }, 2 * NAVIGATION_TIMEOUT);
        // The Promise.then onError function that is called if an error occurred on the manipulation action.
        // (Intentionally shadows the global Error constructor within this scope.)
        function Error(error) {
            errors.push(error);
        }
        // The Promise.then onComplete function that determines whether the edit was successful or not.
        function Complete() {
            // Retrieve all items from datasource for logging purposes.
            LiveUnit.LoggingCore.logComment("DataSource Object After " + insertAction);
            FlipperUtils.getAllItemsFromDataSource(itemDataSourceObject).then(function (items) {
                for (var i = 0; i < items.length; i++) {
                    LiveUnit.LoggingCore.logComment("key: " + items[i].key + ", title: " + items[i].data.title);
                }
            });
            itemDataSourceObject.itemFromIndex(verifyPosition).then(function (item) {
                var verifyItem = item;
                if (verifyItem.data.title === insertData.title) {
                    LiveUnit.LoggingCore.logComment(insertAction + ": Insert succeeded.");
                    // Now verify the flipper.
                    var currentPosition = flipper.currentPage;
                    LiveUnit.LoggingCore.logComment("Current position: " + currentPosition);
                    LiveUnit.LoggingCore.logComment("Data inserted at position: " + verifyPosition);
                    LiveUnit.LoggingCore.logComment("Attempt flip to new position...");
                    var verify = LiveUnit.GetWrappedCallback(function (ev) {
                        var newPosition = flipper.currentPage;
                        LiveUnit.LoggingCore.logComment("Flipper is now at position: " + newPosition);
                        LiveUnit.Assert.isTrue(newPosition === verifyPosition, "New position is not at correct position.");
                        LiveUnit.LoggingCore.logComment("Verify data at current position...");
                        var element = flipper._pageManager._currentPage.element.firstElementChild;
                        LiveUnit.Assert.isTrue(element.id === insertData.title, "Flipper pageManager is not showing data at current position.");
                        LiveUnit.Assert.isTrue(FlipperUtils.isFlipperItemVisible(insertData.title), "Flipper is not showing correct data.");
                    });
                    FlipperUtils.ensureCurrentPage(flipper, verifyPosition, verify);
                }
                else {
                    LiveUnit.LoggingCore.logComment("Data that was inserted was not at the location it was expected to be at.");
                    LiveUnit.LoggingCore.logComment("Expected Position: " + verifyPosition);
                    LiveUnit.LoggingCore.logComment("Expected title of data inserted: " + insertData.title);
                    LiveUnit.LoggingCore.logComment("Actual title of data at expected position: " + verifyItem.data.title);
                    LiveUnit.Assert.fail(insertAction + ": Failed to insert item.");
                }
            }, Error);
        }
    }
    export function moveItem(moveAction, onSuccess, onError, setEventHandlers?) {
        /// <summary>
        /// Attempt to move an item based on the moveAction passed in.
        /// Then verify the results of the move and ensure the move really happened.
        /// Passing signalTestCaseCompleted function allows it to be called later when the verification in the callbacks are completed.
        /// <param name="moveAction" type="string">
        /// Valid options are "MoveToStart", "MoveToEnd", "MoveBefore", "MoveAfter"
        /// </param>
        /// <param name="onSuccess" type="callback_function">
        /// A function to call when the change has completed successfully.
        /// </param>
        /// <param name="onError" type="callback_function">
        /// A function to call when an error occurs.
        /// </param>
        /// <param name="setEventHandlers" optional="true" type="function">
        /// A function passed in that will set up event handlers.
        /// </param>
        /// </summary>
        var totalItems = 10,
            itemDataSourceObject = new WinJS.Binding.List(FlipperUtils.simpleArrayData(totalItems)).dataSource,
            movePosition,
            verifyPosition,
            errors = [],
            flipperDiv = document.getElementById(FlipperUtils.basicFlipperID()),
            moveData;
        // Setup event handlers if passed in.
        if (setEventHandlers) {
            setEventHandlers(FlipperUtils.basicFlipperID());
        }
        // Fires once the flipper has rendered its first page: performs the
        // requested move; Complete() runs as the move promise's completion.
        var action = LiveUnit.GetWrappedCallback(function () {
            if (!flipper) {
                LiveUnit.Assert.fail("Flipper object is invalid");
            }
            flipper.removeEventListener(pageSelectedEvent, action);
            // Retrieve all items from datasource for logging purposes.
            LiveUnit.LoggingCore.logComment("DataSource Object Before " + moveAction);
            FlipperUtils.getAllItemsFromDataSource(itemDataSourceObject).then(function (items) {
                for (var i = 0; i < items.length; i++) {
                    LiveUnit.LoggingCore.logComment("key: " + items[i].key + ", title: " + items[i].data.title);
                }
            });
            // Setup verify variables to check against after moving items around.
            // moveData records the item being moved; verifyPosition where it should land.
            LiveUnit.LoggingCore.logComment(moveAction + ": Attempting to move item...");
            switch (moveAction) {
                case "MoveToStart":
                    verifyPosition = 0;
                    itemDataSourceObject.itemFromIndex(totalItems / 2).then(function (item) {
                        moveData = item;
                        itemDataSourceObject.moveToStart(item.key).
                        then(Complete, Error).
                        then(null, Error);
                    }, Error);
                    break;
                case "MoveToEnd":
                    verifyPosition = totalItems - 1;
                    itemDataSourceObject.itemFromIndex(totalItems / 2).then(function (item) {
                        moveData = item;
                        itemDataSourceObject.moveToEnd(item.key).
                        then(Complete, Error).
                        then(null, Error);
                    }, Error);
                    break;
                case "MoveBefore":
                    // Grab the data item that you want to move from
                    itemDataSourceObject.itemFromIndex(totalItems / 2).then(function (moveDataItem) {
                        // Data being moved
                        moveData = moveDataItem;
                        // Data being moved to
                        movePosition = Math.floor((totalItems / 2) / 2);
                        verifyPosition = movePosition;
                        // Grab the data that you want to move to
                        itemDataSourceObject.itemFromIndex(movePosition).then(function (movePositionItem) {
                            itemDataSourceObject.moveBefore(moveDataItem.key, movePositionItem.key).
                            then(Complete, Error).
                            then(null, Error);
                        }, Error);
                    }, Error);
                    break;
                case "MoveAfter":
                    // Grab the data item that you want to move from
                    itemDataSourceObject.itemFromIndex(totalItems / 2).then(function (moveDataItem) {
                        // Data being moved
                        moveData = moveDataItem;
                        // Data being moved to
                        movePosition = Math.floor((totalItems / 2) / 2);
                        verifyPosition = movePosition + 1;
                        // Grab the data that you want to move to
                        itemDataSourceObject.itemFromIndex(movePosition).then(function (movePositionItem) {
                            itemDataSourceObject.moveAfter(moveDataItem.key, movePositionItem.key).
                            then(Complete, Error).
                            then(null, Error);
                        }, Error);
                    }, Error);
                    break;
                default:
                    LiveUnit.Assert.fail(moveAction + ": Unrecognized move action.");
            }
        });
        flipperDiv.addEventListener(pageSelectedEvent, action);
        var flipper = FlipperUtils.instantiate(FlipperUtils.basicFlipperID(), { itemDataSource: itemDataSourceObject, itemTemplate: FlipperUtils.simpleArrayRenderer });
        LiveUnit.Assert.isNotNull(flipper, "Flipper element should not be null when instantiated.");
        // This must run after all promises have completed.
        var timeout = setTimeout(function () {
            if (errors[0]) {
                LiveUnit.LoggingCore.logComment("Errors Detected.");
                onError(errors);
            }
            else {
                LiveUnit.LoggingCore.logComment("Edit successful.");
                onSuccess();
            }
            // Setting the timeout to 3 x NAVIGATION_TIMEOUT as complete takes a long time to fire.
            // If the timeout is less, the test moves on and then event listener is executed causing the subsequent tests to fail
        }, 3 * NAVIGATION_TIMEOUT);
        // The Promise.then onError function that is called if an error occurred on the manipulation action.
        // (Intentionally shadows the global Error constructor within this scope.)
        function Error(error) {
            errors.push(error);
        }
        // The Promise.then onComplete function that determines whether the edit was successful or not.
        function Complete() {
            // Retrieve all items from datasource for logging purposes.
            LiveUnit.LoggingCore.logComment("DataSource Object After " + moveAction);
            FlipperUtils.getAllItemsFromDataSource(itemDataSourceObject).then(function (items) {
                for (var i = 0; i < items.length; i++) {
                    LiveUnit.LoggingCore.logComment("key: " + items[i].key + ", title: " + items[i].data.title);
                }
            });
            itemDataSourceObject.itemFromIndex(verifyPosition).then(function (item) {
                var verifyItem = item;
                if (verifyItem.data.title === moveData.data.title) {
                    LiveUnit.LoggingCore.logComment(moveAction + ": Move succeeded.");
                    // Now verify the flipper.
                    var currentPosition = flipper.currentPage;
                    LiveUnit.LoggingCore.logComment("Current position: " + currentPosition);
                    LiveUnit.LoggingCore.logComment("Verifying at position: " + verifyPosition);
                    LiveUnit.LoggingCore.logComment("Attempt flip to new position...");
                    FlipperUtils.ensureCurrentPage(flipper, verifyPosition, CurrentPageCompleted);
                }
                else {
                    LiveUnit.LoggingCore.logComment("Data that was moved was not at the location it was expected to be at.");
                    LiveUnit.LoggingCore.logComment("Expected Position: " + verifyPosition);
                    LiveUnit.LoggingCore.logComment("Expected title of data moved: " + moveData.data.title);
                    LiveUnit.LoggingCore.logComment("Actual title of data at expected position: " + verifyItem.data.title);
                    LiveUnit.Assert.fail(moveAction + ": Failed to move item.");
                }
                // Runs after the flipper has navigated to the moved item's page:
                // asserts both the page index and the rendered element match.
                function CurrentPageCompleted() {
                    var newPosition = flipper.currentPage;
                    LiveUnit.LoggingCore.logComment("Flipper is now at position: " + newPosition);
                    LiveUnit.Assert.isTrue(newPosition === verifyPosition, "New position is not at correct position.");
                    LiveUnit.LoggingCore.logComment("Verify data at current position...");
                    var element = flipper._pageManager._currentPage.element.firstElementChild;
                    LiveUnit.Assert.isTrue(element.id === moveData.data.title, "Flipper pageManager is not showing data at current position.");
                    LiveUnit.Assert.isTrue(FlipperUtils.isFlipperItemVisible(moveData.data.title), "Flipper is not showing correct data.");
                }
            }, Error);
        }
    }
export function removeItem(removeAction, onSuccess, onError, setEventHandlers?) {
/// <summary>
/// Attempt to remove an item based on the removeAction passed in.
/// Then verify the results of the remove and ensure the remove really happened.
/// Passing signalTestCaseCompleted function allows it to be called later when the verification in the callbacks are completed.
/// <param name="removeAction" type="string">
/// Valid options are "RemoveFromStart", "RemoveFromEnd", "RemoveFromMiddle"
/// </param>
/// <param name="onSuccess" type="callback_function">
/// A function to call when the change has completed successfully.
/// </param>
/// <param name="onError" type="callback_function">
/// A function to call when an error occurs.
/// </param>
/// <param name="setEventHandlers" optional="true" type="function">
/// A function passed in that will set up event handlers.
/// </param>
/// </summary>
var totalItems = 10,
itemDataSourceObject = Helper.ItemsManager.createTestDataSource(FlipperUtils.simpleArrayData(totalItems), controller, null),
flipper,
flipperDiv = document.getElementById(FlipperUtils.basicFlipperID()),
removePosition,
removeData,  // the datasource *item* being removed; its title lives at removeData.data.title
errors = [];
// Setup event handlers if passed in.
if (setEventHandlers) {
setEventHandlers(FlipperUtils.basicFlipperID());
}
// Runs once the flipper has selected its first page; performs the remove.
var action = LiveUnit.GetWrappedCallback(function () {
if (!flipper) {
LiveUnit.Assert.fail("Flipper is not a valid object");
}
flipper.removeEventListener(pageSelectedEvent, action);
// Retrieve all items from datasource for logging purposes.
LiveUnit.LoggingCore.logComment("DataSource Object Before " + removeAction);
FlipperUtils.getAllItemsFromDataSource(itemDataSourceObject).then(function (items) {
for (var i = 0; i < items.length; i++) {
LiveUnit.LoggingCore.logComment("key: " + items[i].key + ", title: " + items[i].data.title);
}
});
// Setup verify variables to check against after removing items.
LiveUnit.LoggingCore.logComment(removeAction + ": Attempting to remove item...");
switch (removeAction) {
case "RemoveFromStart":
removePosition = 0;
break;
case "RemoveFromEnd":
removePosition = totalItems - 1;
break;
case "RemoveFromMiddle":
removePosition = Math.floor(totalItems / 2);
break;
default:
LiveUnit.Assert.fail(removeAction + ": Unrecognized remove action.");
}
// Verification is triggered by the flipper's datasourcechanged event, not by the
// remove promise, so it runs after the flipper has reacted to the edit.
var verifydschanged = LiveUnit.GetWrappedCallback(function () {
Complete();
});
flipper.addEventListener(datasourceChangedEvent, verifydschanged);
// Remove the specified item.
itemDataSourceObject.itemFromIndex(removePosition).then(function (item) {
removeData = item;
itemDataSourceObject.remove(item.key).
then(null, Error);
}, Error);
});
flipperDiv.addEventListener(pageSelectedEvent, action);
flipper = FlipperUtils.instantiate(FlipperUtils.basicFlipperID(), { itemDataSource: itemDataSourceObject, itemTemplate: FlipperUtils.simpleArrayRenderer });
LiveUnit.Assert.isNotNull(flipper, "Flipper element should not be null when instantiated.");
// This must run after all promises have completed.
var timeout = setTimeout(function () {
if (errors[0]) {
LiveUnit.LoggingCore.logComment("Errors Detected.");
onError(errors);
}
else {
LiveUnit.LoggingCore.logComment("Edit successful.");
onSuccess();
}
// Setting the timeout to 2 x NAVIGATION_TIMEOUT as datasourcecountchanged takes a long time to fire.
// If the timeout is less, the test moves on and then event listener is executed causing the subsequent tests to fail
}, 2 * NAVIGATION_TIMEOUT);
// The Promise.then onError function that is called if an error occurred on the manipulation action.
function Error(error) {
errors.push(error);
}
// The Promise.then onComplete function that determines whether the edit was successful or not.
function Complete() {
LiveUnit.LoggingCore.logComment(removeAction + " completed.");
// Retrieve all items from datasource for logging purposes.
LiveUnit.LoggingCore.logComment("DataSource Object After " + removeAction);
FlipperUtils.getAllItemsFromDataSource(itemDataSourceObject).then(function (items) {
for (var i = 0; i < items.length; i++) {
LiveUnit.LoggingCore.logComment("key: " + items[i].key + ", title: " + items[i].data.title);
}
});
itemDataSourceObject.getCount().then(function (verifyCount) {
LiveUnit.LoggingCore.logComment("Total items in data object before remove: " + totalItems);
LiveUnit.LoggingCore.logComment("Total items in data object after remove: " + verifyCount);
LiveUnit.Assert.areEqual(totalItems - 1, verifyCount, "Total items after remove should be 1 less than before.");
// Retrieve all items from datasource and verify item was removed.
LiveUnit.LoggingCore.logComment("DataSource Object After " + removeAction);
FlipperUtils.getAllItemsFromDataSource(itemDataSourceObject).then(function (items) {
for (var i = 0; i < items.length; i++) {
if (items[i].data.title === removeData.data.title) {
LiveUnit.LoggingCore.logComment("key: " + items[i].key + ", title: " + items[i].data.title);
LiveUnit.LoggingCore.logComment("Data that was supposed to be removed was found in the datasource object.");
// BUG FIX: removeData is an item; the title is removeData.data.title (removeData.title was undefined).
LiveUnit.LoggingCore.logComment("Removed data title: " + removeData.data.title);
LiveUnit.Assert.fail(removeAction + ": Remove failed.");
}
}
});
LiveUnit.LoggingCore.logComment("Removed data not found in datasource.");
// Verify that the removed data is not in the DOM.
LiveUnit.LoggingCore.logComment("Verifying that removed data is not in the DOM.");
// BUG FIX: DOM element ids are the data titles (see items[i].data.title usage above);
// removeData.title was undefined, making this assertion pass vacuously.
LiveUnit.Assert.isNull(document.getElementById(removeData.data.title), "Found " + removeData.data.title + " in the DOM.");
},
function (err) {
errors.push(err);
});
}
}
export function changeItem(changeAction, onSuccess, onError, setEventHandlers?) {
/// <summary>
/// Attempt to change an item based on the changeAction passed in.
/// Then verify the results of the change and ensure the change really happened.
/// Passing signalTestCaseCompleted function allows it to be called later when the verification in the callbacks are completed.
/// <param name="changeAction" type="string">
/// Valid options are "ChangeAtStart", "ChangeAtEnd", "ChangeAtMiddle", "ChangeInvalid"
/// </param>
/// <param name="onSuccess" type="callback_function">
/// A function to call when the change has completed successfully.
/// </param>
/// <param name="onError" type="callback_function">
/// A function to call when an error occurs.
/// </param>
/// <param name="setEventHandlers" optional="true" type="function">
/// A function passed in that will set up event handlers.
/// </param>
/// </summary>
var totalItems = 10,
itemDataSourceObject = Helper.ItemsManager.createTestDataSource(FlipperUtils.simpleArrayData(totalItems), controller, null),
flipper,
flipperDiv = document.getElementById(FlipperUtils.basicFlipperID()),
verifyPosition,
errors = [],
// Replacement payload: both title and content are set to the action name so the
// changed item is uniquely identifiable in the datasource and in the DOM.
changeData = { title: changeAction, content: changeAction },
previousData: any = {};
// Setup event handlers if passed in.
if (setEventHandlers) {
setEventHandlers(FlipperUtils.basicFlipperID());
}
// Runs once the flipper has selected its first page; performs the change.
var action = LiveUnit.GetWrappedCallback(function () {
if (!flipper) {
LiveUnit.Assert.fail("Flipper is not a valid object");
}
// One-shot handler: detach so later page selections do not re-run the edit.
flipper.removeEventListener(pageSelectedEvent, action);
// Retrieve all items from datasource for logging purposes.
LiveUnit.LoggingCore.logComment("DataSource Object Before " + changeAction);
FlipperUtils.getAllItemsFromDataSource(itemDataSourceObject).then(function (items) {
for (var i = 0; i < items.length; i++) {
LiveUnit.LoggingCore.logComment("key: " + items[i].key + ", title: " + items[i].data.title);
}
});
// Setup verify variables to check against after moving items around.
LiveUnit.LoggingCore.logComment(changeAction + ": Attempting to change item...");
switch (changeAction) {
case "ChangeAtStart":
verifyPosition = 0;
break;
case "ChangeAtEnd":
verifyPosition = totalItems - 1;
break;
case "ChangeAtMiddle":
verifyPosition = Math.floor(totalItems / 2);
break;
case "ChangeInvalid":
// Deliberately out-of-range position: the change is expected to fail and
// route through Error, so the errors[] check in the timeout fires onError.
verifyPosition = totalItems + 10;
break;
default:
LiveUnit.Assert.fail(changeAction + ": Unrecognized change action.");
}
if (changeAction !== "ChangeInvalid") {
// Capture the item being replaced so Complete() can verify it is gone.
itemDataSourceObject.itemFromIndex(verifyPosition).then(function (item) {
previousData = item;
itemDataSourceObject.change(verifyPosition.toString(), changeData).
then(Complete, Error).
then(null, Error);
}, Error);
} else {
itemDataSourceObject.change(verifyPosition.toString(), changeData).
then(Complete, Error).
then(null, Error);
}
});
flipperDiv.addEventListener(pageSelectedEvent, action);
flipper = FlipperUtils.instantiate(FlipperUtils.basicFlipperID(), { itemDataSource: itemDataSourceObject, itemTemplate: FlipperUtils.simpleArrayRenderer });
LiveUnit.Assert.isNotNull(flipper, "Flipper element should not be null when instantiated.");
// This must run after all promises have completed.
var timeout = setTimeout(function () {
if (errors[0]) {
LiveUnit.LoggingCore.logComment("Errors Detected.");
onError(errors);
}
else {
LiveUnit.LoggingCore.logComment("Edit successful.");
onSuccess();
}
// Setting the timeout to 2 x NAVIGATION_TIMEOUT as complete takes a long time to fire.
// If the timeout is less, the test moves on and then event listener is executed causing the subsequent tests to fail
}, 2 * NAVIGATION_TIMEOUT);
// The Promise.then onError function that is called if an error occurred on the manipulation action.
function Error(error) {
errors.push(error);
}
// The Promise.then onComplete function that determines whether the edit was successful or not.
function Complete() {
// Retrieve all items from datasource for logging purposes.
LiveUnit.LoggingCore.logComment("DataSource Object After " + changeAction);
FlipperUtils.getAllItemsFromDataSource(itemDataSourceObject).then(function (items) {
for (var i = 0; i < items.length; i++) {
LiveUnit.LoggingCore.logComment("key: " + items[i].key + ", title: " + items[i].data.title);
}
});
itemDataSourceObject.itemFromIndex(verifyPosition).then(function (item) {
var verifyItem = item;
if (verifyItem.data.title === changeData.title) {
LiveUnit.LoggingCore.logComment("Change data was found at expected location in datasource.");
LiveUnit.LoggingCore.logComment(changeAction + ": Change succeeded.");
// Verify that the data replaced by the updated data is not in the datasource object.
FlipperUtils.getAllItemsFromDataSource(itemDataSourceObject).then(function (items) {
for (var i = 0; i < items.length; i++) {
// NOTE(review): compares object identity, not titles — presumably the
// datasource hands back the same data objects; confirm against createTestDataSource.
if (previousData.data === items[i].data) {
LiveUnit.LoggingCore.logComment("key: " + items[i].key + ", title: " + items[i].data.title);
LiveUnit.LoggingCore.logComment("Data that was supposed to be changed was found in the datasource object.");
LiveUnit.Assert.fail(changeAction + ": Change failed.");
}
}
});
LiveUnit.LoggingCore.logComment("Previous data not found in datasource.");
// Switch to the changed page and then check that changed data is no longer in the DOM.
var verify = LiveUnit.GetWrappedCallback(function () {
LiveUnit.LoggingCore.logComment("Verifying that previous data is not in the DOM.");
LiveUnit.Assert.isTrue(document.getElementById(previousData.data.title) === null, "Found " + previousData.data.title + " in the DOM.");
// Now verify the flipper.
var newPosition = flipper.currentPage;
LiveUnit.LoggingCore.logComment("Flipper is now at position: " + newPosition);
LiveUnit.LoggingCore.logComment("Verify data at current position...");
// The rendered element's id equals the item's title (see simpleArrayRenderer usage).
var element = flipper._pageManager._currentPage.element.firstElementChild;
LiveUnit.Assert.isTrue(element.id === changeData.title, "Flipper pageManager is not showing data at current position.");
LiveUnit.Assert.isTrue(FlipperUtils.isFlipperItemVisible(changeData.title), "Flipper is not showing correct data.");
});
var currentPosition = flipper.currentPage;
LiveUnit.LoggingCore.logComment("Current position: " + currentPosition);
LiveUnit.LoggingCore.logComment("Verifying at position: " + verifyPosition);
LiveUnit.LoggingCore.logComment("Attempt flip to new position...");
FlipperUtils.ensureCurrentPage(flipper, verifyPosition, verify);
}
else {
LiveUnit.LoggingCore.logComment("Change data not found at expected location in the datasource.");
LiveUnit.Assert.fail(changeAction + ": Change failed.");
}
}, Error);
}
}
} | the_stack |
import * as action from '../actions';
import { PlaywrightFluent } from '../fluent-api';
import {
ClickOptions,
defaultClickOptions,
defaultHoverOptions,
defaultVerboseOptions,
HoverOptions,
Point,
SelectOptionInfo,
SerializableDOMRect,
VerboseOptions,
} from '../actions';
import { ElementHandle } from 'playwright';
// A chain step: transforms the set of handles found so far into the next set.
type Action = (handles: ElementHandle<Element>[]) => Promise<ElementHandle<Element>[]>;
// Steps that take no parameter (DOM traversal relative to the current handles).
interface ActionInfoWithoutParam {
name: 'parent' | 'nextSibling' | 'previousSibling' | 'unknown';
}
// Steps parameterized by a CSS selector string.
interface ActionInfoWithSelector {
name: 'querySelectorAllInPage' | 'find';
selector: string;
}
// Steps parameterized by a text fragment to match against innerText/value/placeholder.
interface ActionInfoWithText {
name: 'withText' | 'withExactText' | 'withValue' | 'withPlaceholder';
text: string;
}
// Step that picks a single handle by 1-based index (negative counts from the end).
interface ActionInfoWithIndex {
name: 'nth';
index: number;
}
// Discriminated union over all recordable chain steps; 'name' is the discriminant.
type ActionInfo =
| ActionInfoWithoutParam
| ActionInfoWithSelector
| ActionInfoWithText
| ActionInfoWithIndex;
// JSON-serializable snapshot of a selector chain, used to clone SelectorFluent instances.
interface SelectorState {
actions: ActionInfo[];
chainingHistory: string;
}
export class SelectorFluent {
// Human-readable trace of the fluent chain (one line per call); exposed via toString().
private chainingHistory = '';
// Owning fluent API instance; supplies the current page or frame to query against.
private pwf: PlaywrightFluent;
// Recorded chain steps; replayed lazily by executeActions() on every evaluation.
private actionInfos: ActionInfo[] = [];
/**
 * Maps one recorded chain step to the executable action that performs it.
 * Relies on the 'name' discriminant to narrow the ActionInfo union.
 */
private getActionFrom(actionInfo: ActionInfo): Action {
  if (actionInfo.name === 'querySelectorAllInPage') {
    return () =>
      action.querySelectorAllInPage(actionInfo.selector, this.pwf.currentPageOrFrame());
  }
  if (actionInfo.name === 'find') {
    return (handles) => action.querySelectorAllFromHandles(actionInfo.selector, [...handles]);
  }
  if (actionInfo.name === 'nth') {
    return (handles) => action.getNthHandle(actionInfo.index, [...handles]);
  }
  if (actionInfo.name === 'parent') {
    return (handles) => action.getParentsOf([...handles]);
  }
  if (actionInfo.name === 'nextSibling') {
    return (handles) => action.getNextSiblingsOf([...handles]);
  }
  if (actionInfo.name === 'previousSibling') {
    return (handles) => action.getPreviousSiblingsOf([...handles]);
  }
  if (actionInfo.name === 'withText') {
    return (handles) => action.getHandlesWithText(actionInfo.text, [...handles]);
  }
  if (actionInfo.name === 'withExactText') {
    return (handles) => action.getHandlesWithExactText(actionInfo.text, [...handles]);
  }
  if (actionInfo.name === 'withValue') {
    return (handles) => action.getHandlesWithValue(actionInfo.text, [...handles]);
  }
  if (actionInfo.name === 'withPlaceholder') {
    return (handles) => action.getHandlesWithPlaceholder(actionInfo.text, [...handles]);
  }
  throw new Error(`Action '${actionInfo.name}' is not yet implemented`);
}
private async executeActions(): Promise<ElementHandle<Element>[]> {
let handles: ElementHandle<Element>[] = [];
for (let index = 0; index < this.actionInfos.length; index++) {
const action = this.getActionFrom(this.actionInfos[index]);
handles = await action([...handles]);
}
return handles;
}
/**
 * Executes the search.
 * The result may differ from one execution to another
 * especially if targeted element is rendered lately because its data is based on some backend response.
 *
 * @returns {Promise<ElementHandle<Element>[]>} empty array when nothing matches, all found elements otherwise.
 * @memberof SelectorFluent
 */
public async getAllHandles(): Promise<ElementHandle<Element>[]> {
  return await this.executeActions();
}
/**
 * Iterate over each found selector.
 * The index passed to the callback is the 1-based index of the selector in the result list.
 * @param {(selector: SelectorFluent, index: number) => Promise<void>} func
 * @returns {Promise<void>}
 * @memberof SelectorFluent
 * @example
 * const rows = p.selector('[role="row"]');
 * await rows.forEach(async (row) => {
 *   const checkbox = row.find('input[type="checkbox"]');
 *   await p.hover(checkbox).check(checkbox);
 * });
 */
public async forEach(
  func: (selector: SelectorFluent, index: number) => Promise<void>,
): Promise<void> {
  const total = await this.count();
  let position = 1;
  while (position <= total) {
    await func(this.nth(position), position);
    position += 1;
  }
}
/**
 * Obsolete: please use the getHandle() method.
 * Executes the search and returns the first found element.
 * The result may differ from one execution to another
 * especially if targeted element is rendered lately because its data is based on some backend response.
 *
 * @returns {Promise<ElementHandle<Element> | null>} null when nothing matches, first found element otherwise.
 * @memberof SelectorFluent
 * @obsolete
 */
public async getFirstHandleOrNull(): Promise<ElementHandle<Element> | null> {
  const found = await this.executeActions();
  return found.length > 0 ? found[0] : null;
}
/**
 * Executes the search and returns the first found element.
 * The result may differ from one execution to another
 * especially if targeted element is rendered lately because its data is based on some backend response.
 *
 * @returns {Promise<ElementHandle<Element> | null>} null when nothing matches, first found element otherwise.
 * @memberof SelectorFluent
 */
public async getHandle(): Promise<ElementHandle<Element> | null> {
  const found = await this.executeActions();
  return found.length > 0 ? found[0] : null;
}
/**
* Gets the number of found elements.
* The result may differ from one execution to another
* especially if targeted element is rendered lately because its data is based on some backend response.
*
* @returns {Promise<number>} will return 0 if no elements are found.
* @memberof SelectorFluent
*/
public async count(): Promise<number> {
const handles = await this.executeActions();
return handles.length;
}
/**
 * Builds a fluent selector. Without a serialized state, the chain starts with a
 * page-wide query for `selector`; with one, the instance is rehydrated from it
 * (this path is used internally by createSelectorFrom()).
 */
constructor(selector: string, pwf: PlaywrightFluent, stringifiedState?: string) {
  this.pwf = pwf;
  if (!stringifiedState) {
    this.chainingHistory = `selector(${selector})`;
    this.actionInfos.push({ name: 'querySelectorAllInPage', selector });
    return;
  }
  const state = JSON.parse(stringifiedState) as SelectorState;
  this.chainingHistory = state.chainingHistory;
  this.actionInfos = state.actions;
}
/**
 * Returns the human-readable chaining history of this selector
 * (one line per fluent call), useful in logs and error messages.
 */
public toString(): string {
return this.chainingHistory;
}
/**
 * Clones this selector with an extended action list and chaining history by
 * serializing the state and feeding it back through the constructor.
 */
private createSelectorFrom(
  selector: string,
  actions: ActionInfo[],
  chainingHistory: string,
): SelectorFluent {
  const serializedState = JSON.stringify({ actions, chainingHistory } as SelectorState);
  return new SelectorFluent(selector, this.pwf, serializedState);
}
/**
 * Narrows the previous search down to descendants matching `selector`.
 * Returns a new fluent selector; this one is left untouched.
 */
public find(selector: string): SelectorFluent {
  const history = `${this.chainingHistory}
.find(${selector})`;
  return this.createSelectorFrom(selector, [...this.actionInfos, { name: 'find', selector }], history);
}
/**
 * Finds, from previous search, all elements whose innerText contains the specified text.
 *
 * @param {string} text
 * @returns {SelectorFluent} a new fluent selector; this one is left untouched.
 * @memberof SelectorFluent
 */
public withText(text: string): SelectorFluent {
  const history = `${this.chainingHistory}
.withText(${text})`;
  return this.createSelectorFrom(text, [...this.actionInfos, { name: 'withText', text }], history);
}
/**
 * Finds, from previous search, all elements whose innerText matches exactly the specified text.
 * Use this method when you need to find elements with empty content.
 * @param {string} text
 * @returns {SelectorFluent} a new fluent selector; this one is left untouched.
 * @memberof SelectorFluent
 */
public withExactText(text: string): SelectorFluent {
  const history = `${this.chainingHistory}
.withExactText(${text})`;
  return this.createSelectorFrom(text, [...this.actionInfos, { name: 'withExactText', text }], history);
}
/**
 * Finds, from previous search, all elements whose value contains the specified text.
 *
 * @param {string} text
 * @returns {SelectorFluent} a new fluent selector; this one is left untouched.
 * @memberof SelectorFluent
 */
public withValue(text: string): SelectorFluent {
  const history = `${this.chainingHistory}
.withValue(${text})`;
  return this.createSelectorFrom(text, [...this.actionInfos, { name: 'withValue', text }], history);
}
/**
 * Finds, from previous search, all elements whose placeholder contains the specified text.
 *
 * @param {string} text
 * @returns {SelectorFluent} a new fluent selector; this one is left untouched.
 * @memberof SelectorFluent
 */
public withPlaceholder(text: string): SelectorFluent {
  const history = `${this.chainingHistory}
.withPlaceholder(${text})`;
  return this.createSelectorFrom(text, [...this.actionInfos, { name: 'withPlaceholder', text }], history);
}
/**
 * Takes the parent element of each handle found at the previous step.
 * Returns a new fluent selector; this one is left untouched.
 */
public parent(): SelectorFluent {
  const history = `${this.chainingHistory}
.parent()`;
  return this.createSelectorFrom('', [...this.actionInfos, { name: 'parent' }], history);
}
/**
 * Takes the next sibling of each handle found at the previous step.
 * Returns a new fluent selector; this one is left untouched.
 */
public nextSibling(): SelectorFluent {
  const history = `${this.chainingHistory}
.nextSibling()`;
  return this.createSelectorFrom('', [...this.actionInfos, { name: 'nextSibling' }], history);
}
/**
 * Takes the previous sibling of each handle found at the previous step.
 * Returns a new fluent selector; this one is left untouched.
 */
public previousSibling(): SelectorFluent {
  const history = `${this.chainingHistory}
.previousSibling()`;
  return this.createSelectorFrom('', [...this.actionInfos, { name: 'previousSibling' }], history);
}
/**
 * Takes the nth element found at the previous step.
 *
 * @param {number} index : 1-based index
 * @returns {SelectorFluent} a new fluent selector; this one is left untouched.
 * @memberof SelectorFluent
 * @example
 * nth(1): take the first element found at previous step.
 * nth(-1): take the last element found at previous step.
 */
public nth(index: number): SelectorFluent {
  const history = `${this.chainingHistory}
.nth(${index})`;
  return this.createSelectorFrom('', [...this.actionInfos, { name: 'nth', index }], history);
}
/**
* Checks if selector exists.
* The result may differ from one execution to another
* especially if targeted element is rendered lately because its data is based on some backend response.
* So the disability status is the one known when executing this method.
*
* @returns {Promise<boolean>}
* @memberof SelectorFluent
*/
public async exists(): Promise<boolean> {
const handle = await this.getHandle();
if (handle === null) {
return false;
}
return true;
}
/**
* Checks if selector is not in the DOM.
* The result may differ from one execution to another
* especially if targeted element is rendered lately because its data is based on some backend response.
* So the existence status is the one known when executing this method.
*
* @returns {Promise<boolean>}
* @memberof SelectorFluent
*/
public async doesNotExist(): Promise<boolean> {
const handle = await this.getHandle();
if (handle === null) {
return true;
}
return false;
}
/**
 * Checks if the selector is visible.
 * If the selector targets multiple DOM elements, this check is done only on the first one found.
 * The result may differ from one execution to another
 * especially if targeted element is rendered lately because its data is based on some backend response.
 * So the visibility status is the one known when executing this method.
 * @param {Partial<VerboseOptions>} [options=defaultVerboseOptions]
 * @returns {Promise<boolean>}
 * @memberof SelectorFluent
 */
public async isVisible(
  options: Partial<VerboseOptions> = defaultVerboseOptions,
): Promise<boolean> {
  // BUG FIX: the original wrote `{ ...defaultVerboseOptions, options }`, which adds a
  // nested `options` property instead of spreading the caller's overrides, so any
  // user-supplied option (e.g. verbose) was silently ignored.
  const verboseOptions = {
    ...defaultVerboseOptions,
    ...options,
  };
  const handle = await this.getHandle();
  const isElementVisible = await action.isHandleVisible(handle, verboseOptions);
  return isElementVisible;
}
/**
 * Checks if the selector is visible in the current viewport.
 * If the selector targets multiple DOM elements, this check is done only on the first one found.
 * The result may differ from one execution to another
 * especially if targeted element is rendered lately because its data is based on some backend response.
 * So the visibility status is the one known when executing this method.
 * @param {Partial<VerboseOptions>} [options=defaultVerboseOptions]
 * @returns {Promise<boolean>}
 * @memberof SelectorFluent
 */
public async isVisibleInViewport(
  options: Partial<VerboseOptions> = defaultVerboseOptions,
): Promise<boolean> {
  // BUG FIX: spread the caller's options (`...options`) instead of adding a nested
  // `options` property; previously user overrides were silently ignored.
  const verboseOptions = {
    ...defaultVerboseOptions,
    ...options,
  };
  const handle = await this.getHandle();
  const isElementVisible = await action.isHandleVisibleInViewport(handle, verboseOptions);
  return isElementVisible;
}
/**
 * Checks that the selector is not visible.
 * If the selector targets multiple DOM elements, this check is done only on the first one found.
 * The result may differ from one execution to another
 * especially if targeted element is rendered lately because its data is based on some backend response.
 * So the visibility status is the one known when executing this method.
 * @param {Partial<VerboseOptions>} [options=defaultVerboseOptions]
 * @returns {Promise<boolean>}
 * @memberof SelectorFluent
 */
public async isNotVisible(
  options: Partial<VerboseOptions> = defaultVerboseOptions,
): Promise<boolean> {
  // BUG FIX: spread the caller's options (`...options`) instead of adding a nested
  // `options` property; previously user overrides were silently ignored.
  const verboseOptions = {
    ...defaultVerboseOptions,
    ...options,
  };
  const handle = await this.getHandle();
  const isElementNotVisible = await action.isHandleNotVisible(handle, verboseOptions);
  return isElementNotVisible;
}
/**
 * Checks that the selector is not visible in the current viewport.
 * If the selector targets multiple DOM elements, this check is done only on the first one found.
 * The result may differ from one execution to another
 * especially if targeted element is rendered lately because its data is based on some backend response.
 * So the visibility status is the one known when executing this method.
 * @param {Partial<VerboseOptions>} [options=defaultVerboseOptions]
 * @returns {Promise<boolean>}
 * @memberof SelectorFluent
 */
public async isNotVisibleInViewport(
  options: Partial<VerboseOptions> = defaultVerboseOptions,
): Promise<boolean> {
  // BUG FIX: spread the caller's options (`...options`) instead of adding a nested
  // `options` property; previously user overrides were silently ignored.
  const verboseOptions = {
    ...defaultVerboseOptions,
    ...options,
  };
  const handle = await this.getHandle();
  const isElementNotVisible = await action.isHandleNotVisibleInViewport(handle, verboseOptions);
  return isElementNotVisible;
}
/**
 * Checks if the selector is enabled.
 * If the selector targets multiple DOM elements, this check is done only on the first one found.
 * The result may differ from one execution to another
 * especially if targeted element is rendered lately because its data is based on some backend response.
 * So the enability status is the one known when executing this method.
 * @param {Partial<VerboseOptions>} [options=defaultVerboseOptions]
 * @returns {Promise<boolean>}
 * @memberof SelectorFluent
 */
public async isEnabled(
  options: Partial<VerboseOptions> = defaultVerboseOptions,
): Promise<boolean> {
  // BUG FIX: spread the caller's options (`...options`) instead of adding a nested
  // `options` property; previously user overrides were silently ignored.
  const verboseOptions = {
    ...defaultVerboseOptions,
    ...options,
  };
  const handle = await this.getHandle();
  const isElementEnabled = await action.isHandleEnabled(handle, verboseOptions);
  return isElementEnabled;
}
/**
 * Checks if the selector is disabled.
 * If the selector targets multiple DOM elements, this check is done only on the first one found.
 * The result may differ from one execution to another
 * especially if targeted element is rendered lately because its data is based on some backend response.
 * So the disability status is the one known when executing this method.
 *
 * @param {Partial<VerboseOptions>} [options=defaultVerboseOptions]
 * @returns {Promise<boolean>}
 * @memberof SelectorFluent
 */
public async isDisabled(
  options: Partial<VerboseOptions> = defaultVerboseOptions,
): Promise<boolean> {
  // BUG FIX: spread the caller's options (`...options`) instead of adding a nested
  // `options` property; previously user overrides were silently ignored.
  const verboseOptions = {
    ...defaultVerboseOptions,
    ...options,
  };
  const handle = await this.getHandle();
  const isElementDisabled = await action.isHandleDisabled(handle, verboseOptions);
  return isElementDisabled;
}
/**
 * Checks if the selector is read-only.
 * If the selector targets multiple DOM elements, this check is done only on the first one found.
 * The result may differ from one execution to another
 * especially if targeted element is rendered lately because its data is based on some backend response.
 * So the read-only status is the one known when executing this method.
 *
 * @param {Partial<VerboseOptions>} [options=defaultVerboseOptions]
 * @returns {Promise<boolean>}
 * @memberof SelectorFluent
 */
public async isReadOnly(
  options: Partial<VerboseOptions> = defaultVerboseOptions,
): Promise<boolean> {
  // BUG FIX: spread the caller's options (`...options`) instead of adding a nested
  // `options` property; previously user overrides were silently ignored.
  const verboseOptions = {
    ...defaultVerboseOptions,
    ...options,
  };
  const handle = await this.getHandle();
  const isElementReadOnly = await action.isHandleReadOnly(handle, verboseOptions);
  return isElementReadOnly;
}
/**
* Checks if the selector is not read-only.
* If the selector targets multiple DOM elements, this check is done only on the first one found.
* The result may differ from one execution to another
* especially if targeted element is rendered lately because its data is based on some backend response.
* So the disability status is the one known when executing this method.
*
* @param {Partial<VerboseOptions>} [options=defaultVerboseOptions]
* @returns {Promise<boolean>}
* @memberof SelectorFluent
*/
public async isNotReadOnly(
options: Partial<VerboseOptions> = defaultVerboseOptions,
): Promise<boolean> {
const isReadOnly = await this.isReadOnly(options);
return !isReadOnly;
}
/**
 * Gets the innerText of the first found element (null/undefined when unavailable).
 */
public async innerText(): Promise<string | undefined | null> {
  const firstHandle = await this.getHandle();
  return action.getInnerTextOfHandle(firstHandle);
}
/**
 * Gets the value of the first found element (null/undefined when unavailable).
 */
public async value(): Promise<string | undefined | null> {
  const firstHandle = await this.getHandle();
  return action.getValueOfHandle(firstHandle);
}
/**
 * Gets the list of CSS classes of the first found element.
 */
public async classList(): Promise<string[]> {
  const firstHandle = await this.getHandle();
  return action.getClassListOfHandle(firstHandle);
}
/**
 * Gets the value of the named attribute on the first found element, or null.
 */
public async getAttribute(attributeName: string): Promise<string | null> {
  const firstHandle = await this.getHandle();
  return action.getAttributeOfHandle(attributeName, firstHandle);
}
/**
 * Get the placeholder content.
 *
 * @returns {(Promise<string | null>)}
 * @memberof SelectorFluent
 */
public async placeholder(): Promise<string | null> {
  // Thin shortcut over getAttribute().
  return await this.getAttribute('placeholder');
}
/**
 * Get the client rectangle of the selector.
 *
 * @returns {(Promise<SerializableDOMRect | null>)}
 * @memberof SelectorFluent
 */
public async clientRectangle(): Promise<SerializableDOMRect | null> {
  const firstHandle = await this.getHandle();
  return action.getClientRectangleOfHandle(firstHandle);
}
/**
* Get the position of the center of selector's bounding box.
*
* @returns {(Promise<Point | null>)}
* @memberof SelectorFluent
*/
public async position(): Promise<Point | null> {
const handle = await this.getHandle();
const result = await action.getClientRectangleOfHandle(handle);
if (result) {
const x = result.left + result.width / 2;
const y = result.top + result.height / 2;
return {
x,
y,
};
}
return null;
}
/**
* Get the position of the left centered point of the selector's bounding box.
*
* @returns {(Promise<Point | null>)}
* @memberof SelectorFluent
*/
public async leftPosition(): Promise<Point | null> {
const handle = await this.getHandle();
const result = await action.getClientRectangleOfHandle(handle);
if (result) {
const x = result.left;
const y = result.top + result.height / 2;
return {
x,
y,
};
}
return null;
}
/**
* Get the position of the right centered point of the selector's bounding box.
*
* @returns {(Promise<Point | null>)}
* @memberof SelectorFluent
*/
public async rightPosition(): Promise<Point | null> {
const handle = await this.getHandle();
const result = await action.getClientRectangleOfHandle(handle);
if (result) {
const x = result.left + result.width;
const y = result.top + result.height / 2;
return {
x,
y,
};
}
return null;
}
/**
 * Checks that selector has the an attribute with an expected value
 * If the selector targets multiple DOM elements, this check is done only on the first one found.
 * The result may differ from one execution to another
 * especially if targeted element is rendered lately because its data is based on some backend response.
 *
 * @param {string} attributeName
 * @param {string} expectedAttributeValue
 * @returns {Promise<boolean>}
 * @memberof SelectorFluent
 */
public async hasAttributeWithValue(
  attributeName: string,
  expectedAttributeValue: string,
): Promise<boolean> {
  const elementHandle = await this.getHandle();
  return action.hasHandleAttribute(elementHandle, attributeName, expectedAttributeValue);
}
/**
 * Checks that selector has the specified class
 * If the selector targets multiple DOM elements, this check is done only on the first one found.
 * The result may differ from one execution to another
 * especially if targeted element is rendered lately because its data is based on some backend response.
 *
 * @param {string} expectedClass
 * @returns {Promise<boolean>}
 * @memberof SelectorFluent
 */
public async hasClass(expectedClass: string): Promise<boolean> {
  const elementHandle = await this.getHandle();
  return action.hasHandleClass(elementHandle, expectedClass);
}
/**
 * Checks that selector does not have the specified class
 * If the selector targets multiple DOM elements, this check is done only on the first one found.
 * The result may differ from one execution to another
 * especially if targeted element is rendered lately because its data is based on some backend response.
 *
 * @param {string} expectedClass
 * @returns {Promise<boolean>}
 * @memberof SelectorFluent
 */
public async doesNotHaveClass(expectedClass: string): Promise<boolean> {
  const elementHandle = await this.getHandle();
  return action.hasNotHandleClass(elementHandle, expectedClass);
}
/**
 * Checks that the selector is checked.
 * If the selector targets multiple DOM elements, this check is done only on the first one found.
 * The result may differ from one execution to another
 * especially if targeted element is rendered lately because its data is based on some backend response.
 * So the checked status is the one known when executing this method.
 *
 * @param {Partial<VerboseOptions>} [options=defaultVerboseOptions]
 * @returns {Promise<boolean>}
 * @memberof SelectorFluent
 */
public async isChecked(
  options: Partial<VerboseOptions> = defaultVerboseOptions,
): Promise<boolean> {
  // Spread caller overrides on top of the defaults.
  // Fix: the previous code used `{ ...defaultVerboseOptions, options }`,
  // which nested the caller's object under an `options` key instead of
  // merging it, so custom verbose options were silently ignored.
  const verboseOptions = {
    ...defaultVerboseOptions,
    ...options,
  };
  const handle = await this.getHandle();
  const result = await action.isHandleChecked(handle, verboseOptions);
  return result;
}
/**
 * Checks that the selector is unchecked.
 * If the selector targets multiple DOM elements, this check is done only on the first one found.
 *
 * @param {Partial<VerboseOptions>} [options=defaultVerboseOptions]
 * @returns {Promise<boolean>}
 * @memberof SelectorFluent
 */
public async isUnchecked(
  options: Partial<VerboseOptions> = defaultVerboseOptions,
): Promise<boolean> {
  // Spread caller overrides on top of the defaults.
  // Fix: the previous code used `{ ...defaultVerboseOptions, options }`,
  // which nested the caller's object under an `options` key instead of
  // merging it, so custom verbose options were silently ignored.
  const verboseOptions = {
    ...defaultVerboseOptions,
    ...options,
  };
  const handle = await this.getHandle();
  const result = await action.isHandleUnchecked(handle, verboseOptions);
  return result;
}
/**
 * Get the options reported for this selector's handle.
 *
 * @returns {Promise<SelectOptionInfo[]>}
 * @memberof SelectorFluent
 */
public async options(): Promise<SelectOptionInfo[]> {
  const elementHandle = await this.getHandle();
  return action.getAllOptionsOfHandle(elementHandle, this.toString());
}
/**
 * Get every option currently flagged as selected.
 *
 * @returns {Promise<SelectOptionInfo[]>}
 * @memberof SelectorFluent
 */
public async allSelectedOptions(): Promise<SelectOptionInfo[]> {
  const elementHandle = await this.getHandle();
  const everyOption = await action.getAllOptionsOfHandle(elementHandle, this.toString());
  return everyOption.filter((optionInfo) => optionInfo.selected);
}
/**
 * Get the first option flagged as selected, if any.
 *
 * @returns {Promise<SelectOptionInfo | undefined>}
 * @memberof SelectorFluent
 */
public async selectedOption(): Promise<SelectOptionInfo | undefined> {
  const elementHandle = await this.getHandle();
  const everyOption = await action.getAllOptionsOfHandle(elementHandle, this.toString());
  return everyOption.find((optionInfo) => optionInfo.selected);
}
/**
 * hover over selector
 * @param {Partial<HoverOptions>} [options=defaultHoverOptions]
 * @returns {Promise<void>}
 * @memberof SelectorFluent
 */
public async hover(options: Partial<HoverOptions> = defaultHoverOptions): Promise<void> {
  const elementHandle = await this.getHandle();
  // Merge caller overrides on top of the default hover options.
  const mergedOptions = {
    ...defaultHoverOptions,
    ...options,
  } as HoverOptions;
  await action.hoverOnHandle(
    elementHandle,
    this.toString(),
    this.pwf.currentPageOrFrame(),
    mergedOptions,
  );
}
/**
 * click on selector
 *
 * @param {Partial<ClickOptions>} [options=defaultClickOptions]
 * @returns {Promise<void>}
 * @memberof SelectorFluent
 */
public async click(options: Partial<ClickOptions> = defaultClickOptions): Promise<void> {
  const elementHandle = await this.getHandle();
  // Merge caller overrides on top of the default click options.
  const mergedOptions = {
    ...defaultClickOptions,
    ...options,
  } as ClickOptions;
  await action.clickOnHandle(
    elementHandle,
    this.toString(),
    this.pwf.currentPageOrFrame(),
    mergedOptions,
  );
}
}
import Resolution from '../index';
import ResolutionError, {ResolutionErrorCode} from '../errors/resolutionError';
import {NullAddress} from '../types';
import {
CryptoDomainWithoutResolver,
CryptoDomainWithTwitterVerification,
mockAsyncMethods,
expectSpyToBeCalled,
expectResolutionErrorCode,
protocolLink,
expectConfigurationErrorCode,
CryptoDomainWithoutGunDbRecords,
CryptoDomainWithAllRecords,
skipItInLive,
mockAPICalls,
} from './helpers';
import FetchProvider from '../FetchProvider';
import {NamingServiceName} from '../types/publicTypes';
import Uns from '../Uns';
import Networking from '../utils/Networking';
import {ConfigurationErrorCode} from '../errors/configurationError';
import {TokenUriMetadata} from '../types/publicTypes';
import liveData from './testData/liveData.json';
import UnsConfig from '../config/uns-config.json';
import nock from 'nock';
let resolution: Resolution;
let uns: Uns;
// Reset all jest spies and rebuild a fresh rinkeby-backed Resolution before
// every test so mocks from one test cannot leak into the next.
beforeEach(async () => {
jest.restoreAllMocks();
resolution = new Resolution({
sourceConfig: {uns: {url: protocolLink(), network: 'rinkeby'}},
});
// Keep a direct reference to the concrete Uns service for mocking below.
uns = resolution.serviceMap[NamingServiceName.UNS] as Uns;
});
describe('UNS', () => {
// Sanity check: the constructor wired the UNS service to rinkeby
// (network id 4 — see the beforeEach configuration) with the expected url.
it('should define the default uns contract', () => {
expect(uns).toBeDefined();
expect(uns.network).toBe(4);
expect(uns.url).toBe(protocolLink());
});
// Configuring a custom network by name only (no url/provider details)
// must raise CustomNetworkConfigMissing.
it('should not allow missing config for custom network', async () => {
await expectConfigurationErrorCode(
() =>
new Resolution({
sourceConfig: {
uns: {network: 'ropsten'},
},
}),
ConfigurationErrorCode.CustomNetworkConfigMissing,
);
});
// record() should surface the raw value stored under the requested key.
it('checks the record by key', async () => {
const eyes = mockAsyncMethods(uns, {
get: {
resolver: '0x95AE1515367aa64C462c71e87157771165B1287A',
records: {
'ipfs.html.value': 'QmQ38zzQHVfqMoLWq2VeiMLHHYki9XktzXxLYTWXt8cydu',
},
},
});
const ipfsHash = await resolution.record(
CryptoDomainWithAllRecords,
'ipfs.html.value',
);
expectSpyToBeCalled(eyes);
expect(ipfsHash).toBe('QmQ38zzQHVfqMoLWq2VeiMLHHYki9XktzXxLYTWXt8cydu');
});
// twitter() should return the handle when both the handle record and its
// signature record ('validation.social.twitter.username') are present.
it('should return verified twitter handle', async () => {
const spies = mockAsyncMethods(uns, {
get: {
resolver: '0xb66dce2da6afaaa98f2013446dbcb0f4b0ab2842',
owner: '0x499dd6d875787869670900a2130223d85d4f6aa7',
records: {
['validation.social.twitter.username']:
'0x01882395ce631866b76f43535843451444ef4a8ff44db0a9432d5d00658a510512c7519a87c78ba9cad7553e26262ada55c254434a1a3784cd98d06fb4946cfb1b',
['social.twitter.username']: 'Marlene12Bob',
},
},
});
const twitterHandle = await resolution.serviceMap[
NamingServiceName.UNS
].twitter(CryptoDomainWithTwitterVerification);
expectSpyToBeCalled(spies);
expect(twitterHandle).toBe('Marlene12Bob');
});
// A registered domain with an empty record set must raise RecordNotFound.
it('should return NoRecord Resolution error', async () => {
const spies = mockAsyncMethods(uns, {
get: {
resolver: '0x95AE1515367aa64C462c71e87157771165B1287A',
records: {},
},
});
await expectResolutionErrorCode(
resolution.record(CryptoDomainWithAllRecords, 'No.such.record'),
ResolutionErrorCode.RecordNotFound,
);
expectSpyToBeCalled(spies);
// 20s timeout: this path can be slow when running against a live network.
}, 20000);
// resolver() should echo the resolver address reported by the proxy reader.
it('should return a valid resolver address', async () => {
const spies = mockAsyncMethods(uns, {
get: {resolver: '0x95AE1515367aa64C462c71e87157771165B1287A'},
});
const resolverAddress = await resolution.resolver(
CryptoDomainWithAllRecords,
);
expectSpyToBeCalled(spies);
expect(resolverAddress).toBe('0x95AE1515367aa64C462c71e87157771165B1287A');
});
// TLD support checks run against recorded API fixtures (mockAPICalls).
it('should return true for supported domain', async () => {
mockAPICalls('uns_domain_exists_test', protocolLink());
expect(await uns.isSupportedDomain('brad.crypto')).toBe(true);
expect(await uns.isSupportedDomain('brad.blockchain')).toBe(true);
expect(await uns.isSupportedDomain('brad.888')).toBe(true);
});
// .zil belongs to ZNS and .invalid is unknown — both are unsupported by UNS.
it('should return false for unsupported domain', async () => {
mockAPICalls('uns_domain_exists_test', protocolLink());
expect(await uns.isSupportedDomain('brad.zil')).toBe(false);
expect(await uns.isSupportedDomain('brad.invalid')).toBe(false);
});
// A zero owner address means the domain is unregistered — resolver lookup
// must fail with UnregisteredDomain rather than returning undefined.
it('should not find a resolver address', async () => {
const spies = mockAsyncMethods(uns, {
get: {
owner: '0x0000000000000000000000000000000000000000',
resolver: undefined,
},
});
await expectResolutionErrorCode(
resolution.resolver('unknown-unknown-938388383.crypto'),
ResolutionErrorCode.UnregisteredDomain,
);
expectSpyToBeCalled(spies);
});
// Registered (has owner) but no resolver set → UnspecifiedResolver.
it('should throw ResolutionError.UnspecifiedResolver', async () => {
const spies = mockAsyncMethods(uns, {
get: {owner: 'someowneraddress', resolver: NullAddress},
});
await expectResolutionErrorCode(
resolution.resolver(CryptoDomainWithoutResolver),
ResolutionErrorCode.UnspecifiedResolver,
);
expectSpyToBeCalled(spies);
});
describe('.Crypto', () => {
it(`checks the BCH address on ${CryptoDomainWithAllRecords}`, async () => {
const eyes = mockAsyncMethods(uns, {
get: {
resolver: '0x95AE1515367aa64C462c71e87157771165B1287A',
records: {
['crypto.BCH.address']:
'qzx048ez005q4yhphqu2pylpfc3hy88zzu4lu6q9j8',
},
},
});
const addr = await resolution.addr(CryptoDomainWithAllRecords, 'BCH');
expectSpyToBeCalled(eyes);
expect(addr).toBe('qzx048ez005q4yhphqu2pylpfc3hy88zzu4lu6q9j8');
});
it(`checks the ADA address on ${CryptoDomainWithAllRecords}`, async () => {
const eyes = mockAsyncMethods(uns, {
get: {
resolver: '0x95AE1515367aa64C462c71e87157771165B1287A',
records: {
['crypto.ADA.address']:
'DdzFFzCqrhssjmxkChyAHE9MdHJkEc4zsZe7jgum6RtGzKLkUanN1kPZ1ipVPBLwVq2TWrhmPsAvArcr47Pp1VNKmZTh6jv8ctAFVCkj',
},
},
});
const addr = await resolution.addr(CryptoDomainWithAllRecords, 'ADA');
expectSpyToBeCalled(eyes);
expect(addr).toBe(
'DdzFFzCqrhssjmxkChyAHE9MdHJkEc4zsZe7jgum6RtGzKLkUanN1kPZ1ipVPBLwVq2TWrhmPsAvArcr47Pp1VNKmZTh6jv8ctAFVCkj',
);
});
describe('.Metadata', () => {
it('should resolve with ipfs stored on uns', async () => {
const spies = mockAsyncMethods(uns, {
get: {
resolver: '0x95AE1515367aa64C462c71e87157771165B1287A',
records: {
['ipfs.html.value']:
'QmQ38zzQHVfqMoLWq2VeiMLHHYki9XktzXxLYTWXt8cydu',
},
},
});
const ipfsHash = await resolution.ipfsHash(CryptoDomainWithAllRecords);
expectSpyToBeCalled(spies);
expect(ipfsHash).toBe('QmQ38zzQHVfqMoLWq2VeiMLHHYki9XktzXxLYTWXt8cydu');
});
it('should resolve with email stored on uns', async () => {
const spies = mockAsyncMethods(uns, {
get: {
resolver: '0x95AE1515367aa64C462c71e87157771165B1287A',
records: {
['whois.email.value']: 'johnny@unstoppabledomains.com',
},
},
});
const email = await resolution.email(CryptoDomainWithAllRecords);
expectSpyToBeCalled(spies);
expect(email).toBe('johnny@unstoppabledomains.com');
});
it('should resolve with httpUrl stored on uns', async () => {
const eyes = mockAsyncMethods(uns, {
get: {
resolver: '0x95AE1515367aa64C462c71e87157771165B1287A',
records: {
['ipfs.redirect_domain.value']: 'google.com',
},
},
});
const httpUrl = await resolution.httpUrl(CryptoDomainWithAllRecords);
expectSpyToBeCalled(eyes);
expect(httpUrl).toBe('google.com');
});
it('should resolve with the gundb chatId stored on uns', async () => {
const eyes = mockAsyncMethods(uns, {
get: {
resolver: '0x95AE1515367aa64C462c71e87157771165B1287A',
records: {
['gundb.username.value']:
'0x8912623832e174f2eb1f59cc3b587444d619376ad5bf10070e937e0dc22b9ffb2e3ae059e6ebf729f87746b2f71e5d88ec99c1fb3c7c49b8617e2520d474c48e1c',
},
},
});
const chatId = await resolution.chatId('brad.crypto');
expectSpyToBeCalled(eyes);
expect(chatId).toBe(
'0x8912623832e174f2eb1f59cc3b587444d619376ad5bf10070e937e0dc22b9ffb2e3ae059e6ebf729f87746b2f71e5d88ec99c1fb3c7c49b8617e2520d474c48e1c',
);
});
it('should throw UnspecifiedResolver for chatId', async () => {
mockAsyncMethods(uns, {
get: {
owner: '0xBD5F5ec7ed5f19b53726344540296C02584A5237',
},
});
await expectResolutionErrorCode(
resolution.chatId(CryptoDomainWithoutResolver),
ResolutionErrorCode.UnspecifiedResolver,
);
});
it('should resolve with the gundb public key stored on uns', async () => {
const eyes = mockAsyncMethods(uns, {
get: {
resolver: '0x95AE1515367aa64C462c71e87157771165B1287A',
records: {
['gundb.public_key.value']:
'pqeBHabDQdCHhbdivgNEc74QO-x8CPGXq4PKWgfIzhY.7WJR5cZFuSyh1bFwx0GWzjmrim0T5Y6Bp0SSK0im3nI',
},
},
});
const publicKey = await resolution.chatPk('brad.crypto');
expectSpyToBeCalled(eyes);
expect(publicKey).toBe(
'pqeBHabDQdCHhbdivgNEc74QO-x8CPGXq4PKWgfIzhY.7WJR5cZFuSyh1bFwx0GWzjmrim0T5Y6Bp0SSK0im3nI',
);
});
it('should error out for gundb public key stored on uns', async () => {
const eyes = mockAsyncMethods(uns, {
get: {
resolver: '0x878bC2f3f717766ab69C0A5f9A6144931E61AEd3',
records: {},
},
});
await expectResolutionErrorCode(
resolution.chatPk(CryptoDomainWithoutGunDbRecords),
ResolutionErrorCode.RecordNotFound,
);
expectSpyToBeCalled(eyes);
});
it('should error out for gundb chatId stored on uns', async () => {
const eyes = mockAsyncMethods(uns, {
get: {
resolver: '0x878bC2f3f717766ab69C0A5f9A6144931E61AEd3',
records: {},
},
});
await expectResolutionErrorCode(
resolution.chatId(CryptoDomainWithoutGunDbRecords),
ResolutionErrorCode.RecordNotFound,
);
expectSpyToBeCalled(eyes);
});
});
});
describe('.Crypto ProxyReader', () => {
it('should return record by key', async () => {
const eyes = mockAsyncMethods(uns, {
get: {
resolver: '0x95AE1515367aa64C462c71e87157771165B1287A',
records: {
['ipfs.html.value']:
'QmQ38zzQHVfqMoLWq2VeiMLHHYki9XktzXxLYTWXt8cydu',
},
},
});
const ipfsHash = await resolution.record(
CryptoDomainWithAllRecords,
'ipfs.html.value',
);
expectSpyToBeCalled(eyes);
expect(ipfsHash).toBe('QmQ38zzQHVfqMoLWq2VeiMLHHYki9XktzXxLYTWXt8cydu');
});
it('should return NoRecord Resolution error when value not found', async () => {
const spies = mockAsyncMethods(uns, {
get: {
resolver: '0x878bC2f3f717766ab69C0A5f9A6144931E61AEd3',
records: {},
},
});
await expectResolutionErrorCode(
resolution.record(CryptoDomainWithAllRecords, 'No.such.record'),
ResolutionErrorCode.RecordNotFound,
);
expectSpyToBeCalled(spies);
});
it('should return a valid resolver address', async () => {
const spies = mockAsyncMethods(uns, {
get: {
resolver: '0x95AE1515367aa64C462c71e87157771165B1287A',
records: {},
},
});
const resolverAddress = await resolution.resolver(
CryptoDomainWithAllRecords,
);
expectSpyToBeCalled(spies);
expect(resolverAddress).toBe(
'0x95AE1515367aa64C462c71e87157771165B1287A',
);
});
it('should return UnregisteredDomain error when owner address not found', async () => {
const spies = mockAsyncMethods(uns, {
get: {owner: NullAddress},
});
await expectResolutionErrorCode(
resolution.resolver('unknown-unknown-938388383.crypto'),
ResolutionErrorCode.UnregisteredDomain,
);
expectSpyToBeCalled(spies);
});
it('should return UnspecifiedResolver error when resolver address not found', async () => {
const spies = mockAsyncMethods(uns, {
get: {owner: '0x000000000000000000000000000000000000dead'},
});
await expectResolutionErrorCode(
resolution.resolver(CryptoDomainWithoutResolver),
ResolutionErrorCode.UnspecifiedResolver,
);
expectSpyToBeCalled(spies);
});
skipItInLive('should work without any configs', async () => {
resolution = new Resolution();
const eyes = mockAsyncMethods(
resolution.serviceMap[NamingServiceName.UNS],
{
get: {
resolver: '0x95AE1515367aa64C462c71e87157771165B1287A',
records: {
['crypto.ETH.address']:
'0xe7474D07fD2FA286e7e0aa23cd107F8379085037',
},
},
},
);
const address = await resolution.addr(CryptoDomainWithAllRecords, 'eth');
expectSpyToBeCalled(eyes);
expect(address).toBe('0xe7474D07fD2FA286e7e0aa23cd107F8379085037');
});
describe('.Metadata', () => {
it('should resolve with ipfs stored on uns', async () => {
const spies = mockAsyncMethods(uns, {
get: {
resolver: '0x95AE1515367aa64C462c71e87157771165B1287A',
records: {
['ipfs.html.value']:
'QmQ38zzQHVfqMoLWq2VeiMLHHYki9XktzXxLYTWXt8cydu',
},
},
});
const ipfsHash = await resolution.ipfsHash(CryptoDomainWithAllRecords);
expectSpyToBeCalled(spies);
expect(ipfsHash).toBe('QmQ38zzQHVfqMoLWq2VeiMLHHYki9XktzXxLYTWXt8cydu');
});
it('should resolve with email stored on uns', async () => {
const spies = mockAsyncMethods(uns, {
get: {
resolver: '0x95AE1515367aa64C462c71e87157771165B1287A',
records: {['whois.email.value']: 'johnny@unstoppabledomains.com'},
},
});
const email = await resolution.email(CryptoDomainWithAllRecords);
expectSpyToBeCalled(spies);
expect(email).toBe('johnny@unstoppabledomains.com');
});
it.skip('should resolve with httpUrl stored on uns', async () => {
const spies = mockAsyncMethods(uns, {
get: {
resolver: '0x95AE1515367aa64C462c71e87157771165B1287A',
records: {
['ipfs.redirect_domain.value']: 'https://unstoppabledomains.com/',
},
},
});
const httpUrl = await resolution.httpUrl(CryptoDomainWithAllRecords);
expectSpyToBeCalled(spies);
expect(httpUrl).toBe('https://unstoppabledomains.com/');
});
it('should resolve with the gundb chatId stored on uns', async () => {
const spies = mockAsyncMethods(uns, {
get: {
resolver: '0x95AE1515367aa64C462c71e87157771165B1287A',
records: {
['gundb.username.value']:
'0x8912623832e174f2eb1f59cc3b587444d619376ad5bf10070e937e0dc22b9ffb2e3ae059e6ebf729f87746b2f71e5d88ec99c1fb3c7c49b8617e2520d474c48e1c',
},
},
});
const chatId = await resolution.chatId('brad.crypto');
expectSpyToBeCalled(spies);
expect(chatId).toBe(
'0x8912623832e174f2eb1f59cc3b587444d619376ad5bf10070e937e0dc22b9ffb2e3ae059e6ebf729f87746b2f71e5d88ec99c1fb3c7c49b8617e2520d474c48e1c',
);
});
it('should throw UnspecifiedResolver for chatId', async () => {
mockAsyncMethods(uns, {
get: {
owner: '0x000000000000000000000000000000000000dead',
records: {},
resolver: NullAddress,
},
});
await expectResolutionErrorCode(
resolution.chatId(CryptoDomainWithoutResolver),
ResolutionErrorCode.UnspecifiedResolver,
);
});
it('should resolve with the gundb public key stored on uns', async () => {
const spies = mockAsyncMethods(uns, {
get: {
resolver: '0x95AE1515367aa64C462c71e87157771165B1287A',
records: {
['gundb.public_key.value']:
'pqeBHabDQdCHhbdivgNEc74QO-x8CPGXq4PKWgfIzhY.7WJR5cZFuSyh1bFwx0GWzjmrim0T5Y6Bp0SSK0im3nI',
},
},
});
const publicKey = await resolution.chatPk('brad.crypto');
expectSpyToBeCalled(spies);
expect(publicKey).toBe(
'pqeBHabDQdCHhbdivgNEc74QO-x8CPGXq4PKWgfIzhY.7WJR5cZFuSyh1bFwx0GWzjmrim0T5Y6Bp0SSK0im3nI',
);
});
it('should error out for gundb public key stored on uns', async () => {
const spies = mockAsyncMethods(uns, {
get: {
resolver: '0x95AE1515367aa64C462c71e87157771165B1287A',
records: {},
},
});
await expectResolutionErrorCode(
resolution.chatPk(CryptoDomainWithoutGunDbRecords),
ResolutionErrorCode.RecordNotFound,
);
expectSpyToBeCalled(spies);
});
it('should error out for gundb chatId stored on uns', async () => {
const spies = mockAsyncMethods(uns, {
get: {
resolver: '0x95AE1515367aa64C462c71e87157771165B1287A',
records: {},
},
});
await expectResolutionErrorCode(
resolution.chatId(CryptoDomainWithoutGunDbRecords),
ResolutionErrorCode.RecordNotFound,
);
expectSpyToBeCalled(spies);
});
});
});
describe('.Hashing', () => {
describe('.Namehash', () => {
// Known-good namehash vectors, including labels with leading/trailing
// hyphens, which are edge cases for label validation.
it('supports root node', async () => {
expect(resolution.namehash('crypto')).toEqual(
'0x0f4a10a4f46c288cea365fcf45cccf0e9d901b945b9829ccdb54c10dc3cb7a6f',
);
});
it('starts with -', async () => {
expect(resolution.namehash('-hello.crypto')).toBe(
'0xc4ad028bcae9b201104e15f872d3e85b182939b06829f75a128275177f2ff9b2',
);
});
it('ends with -', async () => {
expect(resolution.namehash('hello-.crypto')).toBe(
'0x82eaa6ef14e438940bfd7747e0e4c4fec42af20cee28ddd0a7d79f52b1c59b72',
);
});
it('starts and ends with -', async () => {
expect(resolution.namehash('-hello-.crypto')).toBe(
'0x90cc1963ff09ce95ee2dbb3830df4f2115da9756e087a50283b3e65f6ffe2a4e',
);
});
// A null owner means the domain was never registered.
it('should throw UnregisteredDomain', async () => {
const eyes = mockAsyncMethods(uns, {
get: {owner: NullAddress},
});
await expectResolutionErrorCode(
resolution.record('unregistered.crypto', 'crypto.ETH.address'),
ResolutionErrorCode.UnregisteredDomain,
);
expectSpyToBeCalled(eyes);
});
});
});
// registryAddress() must route each TLD to the right registry contract
// (CNS for .crypto, UNS for newer TLDs) using the network 4 config.
describe('.registryAddress', () => {
it('should return cns registry address', async () => {
mockAPICalls('uns_registry_address_tests', protocolLink());
const registryAddress = await uns.registryAddress(
'udtestdev-265f8f.crypto',
);
expect(registryAddress).toBe(
UnsConfig.networks[4].contracts.CNSRegistry.address,
);
});
// TODO: replace with a real, registered test domain ending in .888
skipItInLive('should return uns registry address', async () => {
mockAPICalls('uns_registry_address_tests', protocolLink());
const registryAddress = await uns.registryAddress('some-domain.888');
expect(registryAddress).toBe(
UnsConfig.networks[4].contracts.UNSRegistry.address,
);
});
it('should throw error if tld is not supported', async () => {
mockAPICalls('uns_registry_address_tests', protocolLink());
await expectResolutionErrorCode(
() => uns.registryAddress('some-domain.zil'),
ResolutionErrorCode.UnsupportedDomain,
);
});
it('should throw error if tld does not exist', async () => {
mockAPICalls('uns_registry_address_tests', protocolLink());
await expectResolutionErrorCode(
() => uns.registryAddress('some-domain.unknown'),
ResolutionErrorCode.UnregisteredDomain,
);
});
});
// isRegistered() is derived from the owner field: a non-empty owner means
// registered, an empty owner means not registered.
describe('.isRegistered', () => {
it('should return true', async () => {
const spies = mockAsyncMethods(uns, {
get: {
owner: '0x58cA45E932a88b2E7D0130712B3AA9fB7c5781e2',
resolver: '0x95AE1515367aa64C462c71e87157771165B1287A',
records: {
['ipfs.html.value']:
'QmQ38zzQHVfqMoLWq2VeiMLHHYki9XktzXxLYTWXt8cydu',
},
},
});
const isRegistered = await uns.isRegistered('brad.crypto');
expectSpyToBeCalled(spies);
expect(isRegistered).toBe(true);
});
it('should return false', async () => {
const spies = mockAsyncMethods(uns, {
get: {
owner: '',
resolver: '',
records: {},
},
});
const isRegistered = await uns.isRegistered(
'thisdomainisdefinitelynotregistered123.crypto',
);
expectSpyToBeCalled(spies);
expect(isRegistered).toBe(false);
});
});
// isAvailable() is the logical inverse of isRegistered(): a domain with an
// owner is taken, one with an empty owner can still be claimed.
describe('.isAvailable', () => {
it('should return false', async () => {
const spies = mockAsyncMethods(uns, {
get: {
owner: '0x58cA45E932a88b2E7D0130712B3AA9fB7c5781e2',
resolver: '0x95AE1515367aa64C462c71e87157771165B1287A',
records: {
['ipfs.html.value']:
'QmQ38zzQHVfqMoLWq2VeiMLHHYki9XktzXxLYTWXt8cydu',
},
},
});
const isAvailable = await uns.isAvailable('brad.crypto');
expectSpyToBeCalled(spies);
expect(isAvailable).toBe(false);
});
it('should return true', async () => {
const spies = mockAsyncMethods(uns, {
get: {
owner: '',
resolver: '',
records: {},
},
});
const isAvailable = await uns.isAvailable(
'thisdomainisdefinitelynotregistered123.crypto',
);
expectSpyToBeCalled(spies);
expect(isAvailable).toBe(true);
});
});
// namehash() output formatting: default hex with 0x prefix, prefix-less hex
// via {prefix: false}, and decimal string via {format: 'dec'} — all three
// must represent the same hash.
describe('#namehash', () => {
it('supports options', async () => {
expect(resolution.namehash('operadingo4.crypto')).toEqual(
'0x70f542f09763d3ab404a6d87f6a2fad7d49f01b09c44064b4227d165ead5cf25',
);
expect(
resolution.namehash('operadingo4.crypto', {prefix: false}),
).toEqual(
'70f542f09763d3ab404a6d87f6a2fad7d49f01b09c44064b4227d165ead5cf25',
);
expect(
resolution.namehash('operadingo4.crypto', {format: 'dec'}),
).toEqual(
'51092378573785850370557709888128643877973998831507731627523713553233928900389',
);
});
});
// Transport failures raised by the FetchProvider must propagate to the
// caller unchanged rather than being swallowed or re-wrapped.
describe('Providers', () => {
it('should throw error when FetchProvider throws Error', async () => {
const url = protocolLink();
const provider = new FetchProvider(NamingServiceName.UNS, url);
resolution = new Resolution({
sourceConfig: {uns: {url, provider, network: 'rinkeby'}},
});
jest.spyOn(Networking, 'fetch').mockRejectedValue(new Error('error_up'));
await expect(
resolution.record(CryptoDomainWithAllRecords, 'No.such.record'),
).rejects.toEqual(new Error('error_up'));
});
});
describe('.tokenURI', () => {
// Happy path: the reader contract call result is returned verbatim.
it('should return token URI', async () => {
const spies = mockAsyncMethods(uns.readerContract, {
call: [
'https://metadata.staging.unstoppabledomains.com/metadata/brad.crypto',
],
});
const uri = await resolution.tokenURI('brad.crypto');
expectSpyToBeCalled(spies);
expect(uri).toEqual(
'https://metadata.staging.unstoppabledomains.com/metadata/brad.crypto',
);
});
// An 'execution reverted' provider error is translated into the
// friendlier UnregisteredDomain error for callers.
it('should throw error', async () => {
const spies = mockAsyncMethods(uns.readerContract, {
call: new ResolutionError(ResolutionErrorCode.ServiceProviderError, {
providerMessage: 'execution reverted',
}),
});
await expectResolutionErrorCode(
() => resolution.tokenURI('fakedomainthatdoesnotexist.crypto'),
ResolutionErrorCode.UnregisteredDomain,
);
expectSpyToBeCalled(spies);
});
// Any other provider error (no 'execution reverted' message) must be
// re-thrown as-is.
skipItInLive('should throw the same internal error', async () => {
const spies = mockAsyncMethods(uns.readerContract, {
call: new ResolutionError(ResolutionErrorCode.ServiceProviderError),
});
await expectResolutionErrorCode(
() => resolution.tokenURI('fakedomainthatdoesnotexist.crypto'),
ResolutionErrorCode.ServiceProviderError,
);
expectSpyToBeCalled(spies);
});
});
// tokenURIMetadata() chains two calls: read the token URI from the reader
// contract, then fetch and parse the JSON metadata behind it.
describe('.tokenURIMetadata', () => {
it('should return token metadata', async () => {
const testMeta: TokenUriMetadata = liveData.bradCryptoMetadata;
const unsSpies = mockAsyncMethods(uns.readerContract, {
call: ['https://metadata.unstoppabledomains.com/metadata/brad.crypto'],
});
const fetchSpies = mockAsyncMethods(Networking, {
fetch: {
ok: true,
json: () => testMeta,
},
});
const metadata = await resolution.tokenURIMetadata('brad.crypto');
expectSpyToBeCalled(unsSpies);
expectSpyToBeCalled(fetchSpies);
expect(metadata).toEqual(testMeta);
});
});
describe('.unhash', () => {
it('should unhash token', async () => {
const testMeta: TokenUriMetadata = liveData.bradCryptoMetadata;
mockAPICalls('unhash', protocolLink());
const domain = await resolution.unhash(
'0x756e4e998dbffd803c21d23b06cd855cdc7a4b57706c95964a37e24b47c10fc9',
NamingServiceName.UNS,
);
expect(domain).toEqual(testMeta.name);
});
skipItInLive('should throw error if hash is wrong', async () => {
const provider = new FetchProvider(NamingServiceName.UNS, protocolLink());
resolution = new Resolution({
sourceConfig: {
uns: {
provider,
network: 'mainnet',
},
},
});
const providerSpy = mockAsyncMethods(provider, {
fetchJson: {
jsonrpc: '2.0',
id: '1',
error: {
code: -32600,
message: 'data type size mismatch, expected 32 got 6',
},
},
});
await expectResolutionErrorCode(
() => resolution.unhash('0xdeaddeaddead', NamingServiceName.UNS),
ResolutionErrorCode.ServiceProviderError,
);
expectSpyToBeCalled(providerSpy);
});
it('should throw error if domain is not found', async () => {
const unregisteredhash = resolution.namehash(
'test34230131207328144694.crypto',
);
mockAPICalls('unhash', protocolLink());
await expectResolutionErrorCode(
() => resolution.unhash(unregisteredhash, NamingServiceName.UNS),
ResolutionErrorCode.UnregisteredDomain,
);
});
skipItInLive(
'should throw an error if hash returned from the network is not equal to the hash provided',
async () => {
const someHash = resolution.namehash('test34230131207328144693.crypto');
mockAPICalls('unhash', protocolLink());
await expectResolutionErrorCode(
() => resolution.unhash(someHash, NamingServiceName.UNS),
ResolutionErrorCode.ServiceProviderError,
);
},
);
skipItInLive(
'getStartingBlockFromRegistry shouild return earliest for custom network',
async () => {
resolution = new Resolution({
sourceConfig: {
uns: {
network: 'custom',
url: protocolLink(),
proxyReaderAddress:
UnsConfig.networks[4].contracts.ProxyReader.address,
},
},
});
const someHash = resolution.namehash('test.coin');
// We need to make sure there is no mocks in the queque before we create new ones
nock.cleanAll();
mockAPICalls('unhashGetStartingBlockTest', protocolLink());
await expectResolutionErrorCode(
() => resolution.unhash(someHash, NamingServiceName.UNS),
ResolutionErrorCode.UnregisteredDomain,
);
// If the getStartingBlockFromRegistry function won't return "earliest" then one of the mocks will not be fired
// Giving us an indicator that something has changed in the function output
if (!nock.isDone()) {
throw new Error(
'Not all mocks have been called, getStartingBlockFromRegistry is misbehaving?',
);
}
},
);
it('should return a .wallet domain', async () => {
const walletDomain = 'udtestdev-johnnywallet.wallet';
const hash = resolution.namehash(walletDomain);
mockAPICalls('unhash', protocolLink());
const result = await resolution.unhash(hash, NamingServiceName.UNS);
expect(result).toBe(walletDomain);
});
it('should return a .coin domain', async () => {
const walletDomain = 'udtestdev-johnnycoin.coin';
const hash = resolution.namehash(walletDomain);
mockAPICalls('unhash', protocolLink());
const result = await resolution.unhash(hash, NamingServiceName.UNS);
expect(result).toBe(walletDomain);
});
});
});
import { expect } from "chai";
import { testSettings } from "../test-config.test";
import { Util } from "../../src/utils/util";
import { Folder } from "../../src/sharepoint/folders";
import { File } from "../../src/sharepoint/files";
import { Item } from "../../src/sharepoint/items";
import { SharingLinkKind, SharingRole } from "../../src/sharepoint/types";
import { Web } from "../../src/sharepoint/webs";
describe("Sharing", () => {
let webAbsUrl = "";
let webRelativeUrl = "";
let web: Web;
before((done) => {
// we need to take some steps to ensure we are operating on the correct web here
// due to the url manipulation in the library for sharing
web = new Web(testSettings.webUrl);
web.select("ServerRelativeUrl", "Url").get().then(u => {
// make sure we have the correct server relative url
webRelativeUrl = u.ServerRelativeUrl;
webAbsUrl = u.Url;
// we need a doc lib with a file and folder in it
web.lists.ensure("SharingTestLib", "Used to test sharing", 101).then(ler => {
// add a file and folder
Promise.all([
ler.list.rootFolder.folders.add("MyTestFolder"),
ler.list.rootFolder.files.add("text.txt", "Some file content!"),
]).then(_ => {
done();
}).catch(_ => {
done();
});
}).catch(_ => {
done();
});
});
});
if (testSettings.enableWebTests) {
describe("can operate on folders", () => {
let folder: Folder = null;
before(() => {
folder = web.getFolderByServerRelativeUrl("/" + Util.combinePaths(webRelativeUrl, "SharingTestLib/MyTestFolder"));
});
// // these tests cover share link
// Folder-level sharing scenarios. Each test returns the chai-as-promised
// expectation so mocha awaits the underlying REST call.
it("Should get a sharing link with default settings.", () => {
return expect(folder.getShareLink())
.to.eventually.be.fulfilled
.and.have.property("sharingLinkInfo")
.and.have.deep.property("Url").that.is.not.null;
});
// NOTE(review): the anonymous-link variants below are disabled — presumably
// they require anonymous sharing to be enabled on the target tenant; confirm
// before re-enabling.
// it("Should get a sharing link with a specified kind.", () => {
// return expect(folder.getShareLink(SharingLinkKind.AnonymousView))
// .to.eventually.be.fulfilled
// .and.have.property("sharingLinkInfo")
// .and.have.deep.property("Url").that.is.not.null;
// });
// it("Should get a sharing link with a specified kind and expiration.", () => {
// return expect(folder.getShareLink(SharingLinkKind.AnonymousView, Util.dateAdd(new Date(), "day", 5)))
// .to.eventually.be.fulfilled
// .and.have.property("sharingLinkInfo")
// .and.have.deep.property("Url").that.is.not.null;
// });
// "c:0(.s|true" is the claims-encoded alias for the "Everyone" principal.
it("Should allow sharing to a person with default settings.", () => {
return expect(folder.shareWith("c:0(.s|true"))
.to.eventually.be.fulfilled
.and.have.property("ErrorMessage").that.is.null;
});
it("Should allow sharing to a person with the edit role.", () => {
return expect(folder.shareWith("c:0(.s|true", SharingRole.Edit))
.to.eventually.be.fulfilled
.and.have.property("ErrorMessage").that.is.null;
});
it("Should allow sharing to a person with the edit role and share all content.", () => {
return expect(folder.shareWith("c:0(.s|true", SharingRole.Edit, true))
.to.eventually.be.fulfilled
.and.have.property("ErrorMessage").that.is.null;
});
it("Should allow for checking of sharing permissions.", () => {
return expect(folder.checkSharingPermissions([{ alias: "c:0(.s|true" }]))
.to.eventually.be.fulfilled;
});
it("Should allow getting Sharing Information.", () => {
return expect(folder.getSharingInformation())
.to.eventually.be.fulfilled;
});
it("Should allow getting Object Sharing Settings.", () => {
return expect(folder.getObjectSharingSettings(true))
.to.eventually.be.fulfilled;
});
it("Should allow unsharing.", () => {
return expect(folder.unshare())
.to.eventually.be.fulfilled;
});
// Disabled for the same reason as the anonymous-link tests above.
// it("Should allow deleting a link by kind.", () => {
// return expect(folder.getShareLink(SharingLinkKind.AnonymousView).then(_ => {
// return folder.deleteSharingLinkByKind(SharingLinkKind.AnonymousView);
// })).to.eventually.be.fulfilled;
// });
// it("Should allow unsharing a link by kind.", () => {
// return expect(folder.getShareLink(SharingLinkKind.AnonymousView).then(response => {
// return folder.unshareLink(SharingLinkKind.AnonymousView, response.sharingLinkInfo.ShareId);
// })).to.eventually.be.fulfilled;
// });
});
// File-level sharing scenarios against SharingTestLib/text.txt.
describe("can operate on files", () => {
let file: File = null;
before(() => {
// Resolve the test file by server-relative URL once for every test below.
file = web.getFileByServerRelativeUrl("/" + Util.combinePaths(webRelativeUrl, "SharingTestLib/text.txt"));
});
it("Should get a sharing link with default settings.", () => {
return expect(file.getShareLink())
.to.eventually.be.fulfilled
.and.have.property("sharingLinkInfo")
.and.have.deep.property("Url").that.is.not.null;
});
// NOTE(review): anonymous-link variants disabled — presumably they need
// anonymous sharing enabled on the tenant; confirm before re-enabling.
// it("Should get a sharing link with a specified kind.", () => {
// return expect(file.getShareLink(SharingLinkKind.AnonymousView))
// .to.eventually.be.fulfilled
// .and.have.property("sharingLinkInfo")
// .and.have.deep.property("Url").that.is.not.null;
// });
// it("Should get a sharing link with a specified kind and expiration.", () => {
// return expect(file.getShareLink(SharingLinkKind.AnonymousView, Util.dateAdd(new Date(), "day", 5)))
// .to.eventually.be.fulfilled
// .and.have.property("sharingLinkInfo")
// .and.have.deep.property("Url").that.is.not.null;
// });
// "c:0(.s|true" is the claims-encoded alias for the "Everyone" principal.
it("Should allow sharing to a person with default settings.", () => {
return expect(file.shareWith("c:0(.s|true"))
.to.eventually.be.fulfilled
.and.have.property("ErrorMessage").that.is.null;
});
it("Should allow sharing to a person with the edit role.", () => {
return expect(file.shareWith("c:0(.s|true", SharingRole.Edit))
.to.eventually.be.fulfilled
.and.have.property("ErrorMessage").that.is.null;
});
// NOTE(review): the title says "edit role" but SharingRole.View is passed —
// confirm whether the title or the argument is wrong.
it("Should allow sharing to a person with the edit role and require sign-in.", () => {
return expect(file.shareWith("c:0(.s|true", SharingRole.View, true))
.to.eventually.be.fulfilled
.and.have.property("ErrorMessage").that.is.null;
});
it("Should allow for checking of sharing permissions.", () => {
return expect(file.checkSharingPermissions([{ alias: "c:0(.s|true" }]))
.to.eventually.be.fulfilled;
});
it("Should allow getting Sharing Information.", () => {
return expect(file.getSharingInformation())
.to.eventually.be.fulfilled;
});
it("Should allow getting Object Sharing Settings.", () => {
return expect(file.getObjectSharingSettings(true))
.to.eventually.be.fulfilled;
});
it("Should allow unsharing.", () => {
return expect(file.unshare())
.to.eventually.be.fulfilled;
});
// Disabled for the same reason as the anonymous-link tests above.
// it("Should allow deleting a link by kind.", () => {
// return expect(file.getShareLink(SharingLinkKind.AnonymousView).then(_ => {
// return file.deleteSharingLinkByKind(SharingLinkKind.AnonymousView);
// })).to.eventually.be.fulfilled;
// });
// it("Should allow unsharing a link by kind.", () => {
// return expect(file.getShareLink(SharingLinkKind.AnonymousView).then(response => {
// return file.unshareLink(SharingLinkKind.AnonymousView, response.sharingLinkInfo.ShareId);
// })).to.eventually.be.fulfilled;
// });
});
// Item-level sharing scenarios against item 1 of the SharingTestLib list.
describe("can operate on items", () => {
    let item: Item = null;
    before(() => {
        // Resolve the test item once for every test below.
        item = web.lists.getByTitle("SharingTestLib").items.getById(1);
    });
    it("Should get a sharing link with default settings.", () => {
        return expect(item.getShareLink())
            .to.eventually.be.fulfilled
            .and.have.property("sharingLinkInfo")
            .and.have.deep.property("Url").that.is.not.null;
    });
    // NOTE(review): anonymous-link variants disabled — presumably they need
    // anonymous sharing enabled on the tenant; confirm before re-enabling.
    // it("Should get a sharing link with a specified kind.", () => {
    //     return expect(item.getShareLink(SharingLinkKind.AnonymousView))
    //         .to.eventually.be.fulfilled
    //         .and.have.property("sharingLinkInfo")
    //         .and.have.deep.property("Url").that.is.not.null;
    // });
    // it("Should get a sharing link with a specified kind and expiration.", () => {
    //     return expect(item.getShareLink(SharingLinkKind.AnonymousView, Util.dateAdd(new Date(), "day", 5)))
    //         .to.eventually.be.fulfilled
    //         .and.have.property("sharingLinkInfo")
    //         .and.have.deep.property("Url").that.is.not.null;
    // });
    // "c:0(.s|true" is the claims-encoded alias for the "Everyone" principal.
    it("Should allow sharing to a person with default settings.", () => {
        return expect(item.shareWith("c:0(.s|true"))
            .to.eventually.be.fulfilled
            .and.have.property("ErrorMessage").that.is.null;
    });
    it("Should allow sharing to a person with the edit role.", () => {
        return expect(item.shareWith("c:0(.s|true", SharingRole.Edit))
            .to.eventually.be.fulfilled
            .and.have.property("ErrorMessage").that.is.null;
    });
    // Bug fix: this test was duplicated verbatim (same title, same body);
    // the second copy has been removed.
    // NOTE(review): the title says "edit role" but SharingRole.View is passed —
    // confirm whether the title or the argument is wrong.
    it("Should allow sharing to a person with the edit role and require sign-in.", () => {
        return expect(item.shareWith("c:0(.s|true", SharingRole.View, true))
            .to.eventually.be.fulfilled
            .and.have.property("ErrorMessage").that.is.null;
    });
    it("Should allow for checking of sharing permissions.", () => {
        return expect(item.checkSharingPermissions([{ alias: "c:0(.s|true" }]))
            .to.eventually.be.fulfilled;
    });
    it("Should allow getting Sharing Information.", () => {
        return expect(item.getSharingInformation())
            .to.eventually.be.fulfilled;
    });
    it("Should allow getting Object Sharing Settings.", () => {
        return expect(item.getObjectSharingSettings(true))
            .to.eventually.be.fulfilled;
    });
    it("Should allow unsharing.", () => {
        return expect(item.unshare())
            .to.eventually.be.fulfilled;
    });
    // Disabled for the same reason as the anonymous-link tests above.
    // it("Should allow deleting a link by kind.", () => {
    //     return expect(item.getShareLink(SharingLinkKind.AnonymousView).then(_ => {
    //         return item.deleteSharingLinkByKind(SharingLinkKind.AnonymousView);
    //     })).to.eventually.be.fulfilled;
    // });
    // it("Should allow unsharing a link by kind.", () => {
    //     return expect(item.getShareLink(SharingLinkKind.AnonymousView).then(response => {
    //         return item.unshareLink(SharingLinkKind.AnonymousView, response.sharingLinkInfo.ShareId);
    //     })).to.eventually.be.fulfilled;
    // });
});
// Web-level sharing scenarios: sharing the web itself and sharing an
// arbitrary object by absolute URL.
describe("can operate on webs", () => {
    it("Should allow you to share a web with a person using default settings", () => {
        // "c:0(.s|true" is the claims-encoded alias for the "Everyone" principal.
        const shareWebPromise = web.shareWith("c:0(.s|true");
        return expect(shareWebPromise)
            .to.eventually.be.fulfilled
            .and.have.property("ErrorMessage").that.is.null;
    });
    it("Should allow you to share an object by url", () => {
        const objectUrl = Util.combinePaths(webAbsUrl, "SharingTestLib/test.txt");
        const shareObjectPromise = web.shareObject(objectUrl, "c:0(.s|true", SharingRole.View);
        return expect(shareObjectPromise)
            .to.eventually.be.fulfilled
            .and.have.property("ErrorMessage").that.is.null;
    });
});
}
});
import { Draft } from "immer";
import { Store as _Store } from "redux";
import { Graph as _Graph } from "./graph";
import { Action as _Action } from "./action";
import { Logs } from "../logs";
import { default as _ActionType } from "./action_type";
import { Env as _Env } from "./envs";
import {
PersistedProcState as _PersistedProcState,
StatePersistenceKey as _StatePersistenceKey,
STATE_PERSISTENCE_KEYS as _STATE_PERSISTENCE_KEYS,
} from "./persistence";
import { OrgArchiveV1 as _OrgArchiveV1 } from "./archive";
import {
State as _State,
PartialAccountState as _PartialAccountState,
PartialClientState as _PartialClientState,
ProcState as _ProcState,
defaultClientState as _defaultClientState,
defaultAccountState as _defaultAccountState,
defaultProcState as _defaultProcState,
lockedState as _lockedState,
ACCOUNT_STATE_KEYS as _ACCOUNT_STATE_KEYS,
CLIENT_STATE_KEYS as _CLIENT_STATE_KEYS,
PROC_STATE_KEYS as _PROC_STATE_KEYS,
CLIENT_PROC_STATE_KEYS as _CLIENT_PROC_STATE_KEYS,
} from "./state";
import { Crypto, Api, Auth, Model, Rbac } from "../";
import * as z from "zod";
import { Patch } from "rfc6902";
// Aggregated public surface of the EnvKey client core: re-exported
// sub-modules, state shapes, client identification schemas, auth records,
// and the action/handler type machinery used by the dispatcher.
namespace Client {
// Re-exported sub-modules: graph model, action enums/creators, env helpers.
export import Graph = _Graph;
export import ActionType = _ActionType;
export import Action = _Action;
export import Env = _Env;
// Aliases for the client state shapes defined in ./state, ./archive and ./persistence.
export type State = _State;
export type PartialAccountState = _PartialAccountState;
export type PartialClientState = _PartialClientState;
export type ProcState = _ProcState;
export type OrgArchiveV1 = _OrgArchiveV1;
export type PersistedProcState = _PersistedProcState;
export type StatePersistenceKey = _StatePersistenceKey;
// Redux store over the core-process state; every dispatched action carries
// context metadata (see ActionTypeWithContextMeta below).
export type ReduxStore = _Store<
ProcState,
ActionTypeWithContextMeta<
Action.EnvkeyAction | Action.SuccessAction | Action.FailureAction
>
>;
// Default-state factories and state-key lists, re-exported from ./state / ./persistence.
export const defaultClientState = _defaultClientState;
export const defaultAccountState = _defaultAccountState;
export const defaultProcState = _defaultProcState;
export const lockedState = _lockedState;
export const ACCOUNT_STATE_KEYS = _ACCOUNT_STATE_KEYS;
export const CLIENT_STATE_KEYS = _CLIENT_STATE_KEYS;
export const PROC_STATE_KEYS = _PROC_STATE_KEYS;
export const CLIENT_PROC_STATE_KEYS = _CLIENT_PROC_STATE_KEYS;
export const STATE_PERSISTENCE_KEYS = _STATE_PERSISTENCE_KEYS;
// Runtime-validated (zod) schemas naming the kinds of clients that can talk
// to the core process.
export type ApiClientName = z.infer<typeof ApiClientNameSchema>;
export const ApiClientNameSchema = z.enum(["app", "cli"]);
export type FetchClientName = z.infer<typeof FetchClientNameSchema>;
export const FetchClientNameSchema = z.enum([
"fetch",
"source",
"nodejs",
"ruby",
"python",
"go",
"dotnet",
"webpack",
// Languages below are not yet supported fetch clients.
// "java",
// "php",
// "rust",
// "erlang",
// "elixir",
// "clojure",
// "scala",
// "haskell",
// "julia",
// "r",
// "c",
// "c++",
// "lua",
]);
export const ClientNameSchema = z.union([
ApiClientNameSchema,
FetchClientNameSchema,
z.enum(["core"]), // for actions initiated by the core itself
]);
export type ClientName = z.infer<typeof ClientNameSchema>;
// Flat list kept in sync with the schemas above.
export const ALL_CLIENT_NAMES: ClientName[] = [
"app",
"cli",
"core",
"fetch",
"source",
"nodejs",
"ruby",
"python",
"go",
"dotnet",
"webpack",
];
// Identifying metadata every client sends along with requests.
export const ClientParamsSchema = z.object({
clientName: ClientNameSchema,
clientVersion: z.string(),
clientOs: z.string().optional(),
clientArch: z.string().optional(),
clientOsRelease: z.string().optional(),
});
// 👇 for core proc auth: symmetric encryption/decryption by device key (lives in os credential store)
// passed via user-agent to core process local express server routes
export const CORE_PROC_AUTH_TOKEN =
"envkey-core-process-auth-34e710d499b12d3a0bdb7bf5b3b038ea";
export const CORE_PROC_AGENT_NAME = "EnvKey-Client";
// ClientParams narrowed to a specific client name.
export type ClientParams<ClientNameType extends ClientName = ClientName> =
Omit<z.infer<typeof ClientParamsSchema>, "clientName"> & {
clientName: ClientNameType;
};
// Persisted auth record for a human user's device session, including the
// device private key and host/org identification.
export type ClientUserAuth = {
type: "clientUserAuth";
deviceId: string;
deviceName: string;
userId: string;
orgId: string;
orgName: string;
hostUrl: string;
addedAt: number;
lastAuthAt: number;
privkey: Crypto.Privkey;
token?: string;
requiresPassphrase: boolean;
requiresLockout: boolean;
lockoutMs: number | undefined;
externalAuthProviderId?: string;
primaryRegion?: string;
profile?: string;
failoverRegion?: string;
} & Pick<
Model.OrgUser,
"email" | "provider" | "uid" | "firstName" | "lastName"
> &
(
| {
hostType: "cloud";
deploymentTag?: undefined;
}
| {
hostType: "self-hosted";
deploymentTag: string;
internalMode?: boolean;
}
);
// Persisted auth record for a CLI key (non-human) session.
export type ClientCliAuth = {
type: "clientCliAuth";
userId: string;
orgId: string;
privkey: Crypto.Privkey;
hostUrl: string;
addedAt: number;
lastAuthAt: number;
} & (
| {
hostType: "cloud";
deploymentTag?: undefined;
}
| {
hostType: "self-hosted";
deploymentTag: string;
}
);
// A self-hosted install that has been kicked off but not yet registered as a
// full ClientUserAuth; region/profile become required while IDs are dropped.
export type PendingSelfHostedDeployment = Omit<
ClientUserAuth,
| "type"
| "deviceId"
| "userId"
| "orgId"
| "token"
| "lastAuthAt"
| "hostType"
| "primaryRegion"
| "profile"
| "failoverRegion"
> & {
type: "pendingSelfHostedDeployment";
hostType: "self-hosted";
subdomain: string;
domain: string;
deploymentTag: string;
codebuildLink: string;
internalMode?: boolean;
primaryRegion: string;
profile: string;
failoverRegion?: string;
};
// Error shapes surfaced to callers.
export type FetchError = {
type: "error";
error: {
message: string;
stack: string;
code: number;
};
};
export type ClientError =
| Api.Net.ErrorResult
| {
type: "clientError";
error: Error;
}
| FetchError;
// Result of dispatching an action: the resolved action, resulting state, and
// whether the action was retried after a graph refresh.
export type DispatchResult<
T extends
| Client.Action.EnvkeyAction
| Client.Action.SuccessAction
| Client.Action.FailureAction =
| Client.Action.EnvkeyAction
| Client.Action.SuccessAction
| Client.Action.FailureAction
> = {
success: boolean;
resultAction: T;
state: Client.State;
retriedWithUpdatedGraph?: true;
};
// Union of the three kinds of action definitions the dispatcher understands:
// synchronous client actions, async client actions, and API request actions.
export type ActionParams<
ActionType extends Action.EnvkeyAction = Action.EnvkeyAction,
SuccessType = any,
FailureType = ClientError,
DispatchContextType = any,
RootActionType extends Action.EnvkeyAction = ActionType
> =
| ClientActionParams<ActionType, DispatchContextType>
| AsyncClientActionParams<
ActionType,
SuccessType,
FailureType,
DispatchContextType
>
| ApiActionParams<
ActionType,
SuccessType,
FailureType,
DispatchContextType,
RootActionType
>;
// Synchronous, purely local action: optional state producers plus a handler.
export type ClientActionParams<
ActionType extends Action.EnvkeyAction = Action.EnvkeyAction,
DispatchContextType = any
> = {
type: "clientAction";
actionType: ActionType["type"];
skipLocalSocketUpdate?: true;
stateProducer?: StateProducer<ActionType>;
procStateProducer?: ProcStateProducer<ActionType>;
handler?: ActionHandler<ActionType, DispatchContextType>;
};
// Async local action that may fan out to API requests (optionally in bulk).
export type AsyncClientActionParams<
ActionType extends Action.EnvkeyAction = Action.EnvkeyAction,
SuccessType = any,
FailureType = ClientError,
DispatchContextType = any
> = {
type: "asyncClientAction";
actionType: ActionType["type"];
verifyCurrentUser?: true;
skipLocalSocketUpdate?: true;
serialAction?: true;
stateProducer?: StateProducer<ActionType>;
handler?: AsyncActionHandler<
ActionType,
SuccessType,
FailureType,
DispatchContextType
>;
bulkApiDispatcher?: true;
apiActionCreator?: (
payload: ActionType extends { payload: any }
? ActionType["payload"] extends any[]
? ActionType["payload"][0]
: ActionType["payload"]
: any,
state: Client.State,
context: Client.Context
) => Promise<{
action: Omit<
Action.DispatchAction<Api.Action.RequestAction>,
"payload"
> & {
payload: Omit<
Api.Action.RequestAction["payload"],
"envs" | "encryptedByTrustChain"
>;
};
dispatchContext?: DispatchContextType;
}>;
apiSuccessPayloadCreator?: (
apiRes: DispatchResult,
dispatchContext?: DispatchContextType
) => Promise<SuccessType>;
} & AsyncActionMethods<
ActionType,
SuccessType,
FailureType,
DispatchContextType
>;
// Produces an immer recipe that optimistically applies a graph action's
// effect to the local user graph before the server confirms it.
export type GraphProposer<
ActionType extends Action.EnvkeyAction = Api.Action.GraphAction
> = (
action: ActionType extends Api.Action.GraphAction
? Omit<ActionType, "payload"> & {
payload: Omit<
ActionType["payload"],
"envs" | "encryptedByTrustChain"
>;
}
: ActionType,
state: Client.State,
context: Client.Context
) => (
graphDraft: Draft<Client.Graph.UserGraph>
) => Client.Graph.UserGraph | void;
// Definition of an action that is sent to the EnvKey API, with logging,
// auth, graph, and crypto-related flags.
export type ApiActionParams<
ActionType extends Action.EnvkeyAction = Api.Action.GraphAction,
SuccessType = any,
FailureType = ClientError,
DispatchContextType = any,
RootActionType extends Action.EnvkeyAction = ActionType
> = {
type: "apiRequestAction";
actionType: Api.ActionType;
loggableType?: Logs.LoggableType;
loggableType2?: Logs.LoggableType;
loggableType3?: Logs.LoggableType;
loggableType4?: Logs.LoggableType;
skipProcessRootPubkeyReplacements?: true;
skipProcessRevocationRequests?: true;
skipReencryptPermitted?: true;
serialAction?: true;
refreshActionCreator?: (
requestAction: RootActionType
) => Action.EnvkeyAction;
authenticated?: true;
graphAction?: true;
envsUpdate?: true;
bulkDispatchOnly?: true;
stateProducer?: StateProducer<ActionType>;
graphProposer?: GraphProposer<ActionType>;
encryptedKeysScopeFn?: (
graph: Client.Graph.UserGraph,
action: ActionType
) => Rbac.OrgAccessScope;
} & AsyncActionMethods<
ActionType,
SuccessType,
FailureType,
DispatchContextType
>;
// Per-dispatch context: which client is calling, which account, and
// dispatch-scoped flags/overrides.
export type Context<DispatchContextType = any> = {
client: ClientParams<"cli" | "app" | "core">;
clientId: string;
accountIdOrCliKey: string | undefined;
hostUrl?: string;
dispatchContext?: DispatchContextType;
store?: ReduxStore;
auth?: Auth.ApiAuthParams;
rootClientAction?: Action.ClientAction;
skipProcessRevocationRequests?: true;
skipWaitForSerialAction?: true;
ipTestOverride?: string;
};
// Context as seen by handlers, with resolved auth params attached.
type HandlerContext<
DispatchContextType,
AuthParamsType extends Auth.ApiAuthParams = Auth.ApiAuthParams
> = Context<DispatchContextType> & {
authParams?: AuthParamsType;
};
// Merges dispatch context (and a temp id) into an action's meta field.
export type ActionTypeWithContextMeta<
ActionType,
DispatchContextType = any
> = ActionType & {
meta: (ActionType extends { meta: {} }
? ActionType["meta"] & Context<DispatchContextType>
: Context<DispatchContextType>) & {
tempId: string;
};
};
// Immer recipes over account-scoped and proc-scoped state respectively.
export type StateProducer<
ActionType extends
| Action.EnvkeyAction
| Action.SuccessAction
| Action.FailureAction = Action.EnvkeyAction,
DispatchContextType = any
> = (
draft: Draft<State>,
action: ActionTypeWithContextMeta<ActionType, DispatchContextType>
) => Draft<State> | void;
export type ProcStateProducer<
ActionType extends
| Action.EnvkeyAction
| Action.SuccessAction
| Action.FailureAction = Action.EnvkeyAction,
DispatchContextType = any
> = (
draft: Draft<ProcState>,
action: ActionTypeWithContextMeta<ActionType, DispatchContextType>
) => Draft<ProcState> | void;
// Side-effect handler for a synchronous client action.
export type ActionHandler<
ActionType extends Action.EnvkeyAction = Action.EnvkeyAction,
DispatchContextType = any
> = (
state: State,
action: ActionType,
context: HandlerContext<DispatchContextType>
) => Promise<void>;
// Handler for an async action; must resolve via dispatchSuccess or
// dispatchFailure, which produce the corresponding follow-up action.
export type AsyncActionHandler<
ActionType extends Action.EnvkeyAction = Action.ClientAction,
SuccessType = any,
FailureType = ClientError,
DispatchContextType = any
> = (
state: State,
action: ActionType,
dispatchParams: {
context: HandlerContext<DispatchContextType>;
dispatchSuccess: (
payload: SuccessType,
context: HandlerContext<DispatchContextType>
) => Promise<DispatchResult>;
dispatchFailure: (
payload: FailureType,
context: HandlerContext<DispatchContextType>
) => Promise<DispatchResult>;
}
) => Promise<DispatchResult>;
// Post-resolution hooks for the success / failure branches.
export type SuccessHandler<
ActionType extends Action.EnvkeyAction = Action.EnvkeyAction,
SuccessType = any,
DispatchContextType = any
> = (
state: State,
action: ActionType,
payload: SuccessType,
context: HandlerContext<DispatchContextType>
) => Promise<void>;
export type FailureHandler<
ActionType extends Action.EnvkeyAction = Action.EnvkeyAction,
FailureType = ClientError,
DispatchContextType = any
> = (
state: State,
action: ActionType,
payload: FailureType,
context: HandlerContext<DispatchContextType>
) => Promise<void>;
type FailureMethods<
ActionType extends Action.EnvkeyAction,
FailureType = ClientError,
DispatchContextType = any
> = {
failureStateProducer?: StateProducer<
Action.FailureAction<ActionType, FailureType>,
DispatchContextType
>;
failureHandler?: FailureHandler<
ActionType,
FailureType,
DispatchContextType
>;
};
// Optional success/failure/end producers and handlers shared by async
// client actions and API request actions.
export type AsyncActionMethods<
ActionType extends Action.EnvkeyAction,
SuccessType = any,
FailureType = ClientError,
DispatchContextType = any
> = {
successAccountIdFn?: (payload: SuccessType) => string | undefined;
successStateProducer?: StateProducer<
Action.SuccessAction<ActionType, SuccessType>,
DispatchContextType
>;
successHandler?: SuccessHandler<
ActionType,
SuccessType,
DispatchContextType
>;
endStateProducer?: StateProducer<
| Action.SuccessAction<ActionType, SuccessType>
| Action.FailureAction<ActionType, FailureType>,
DispatchContextType
>;
} & FailureMethods<ActionType, FailureType, DispatchContextType>;
// Results of key/invite/grant generation flows. The encryptionKey fields are
// the symmetric secrets shared out-of-band with the recipient.
export type GeneratedEnvkeyResult = {
keyableParentId: string;
envkeyIdPart: string;
encryptionKey: string;
};
export type GeneratedInviteResult = {
user: Pick<
Model.OrgUser,
| "id"
| "email"
| "firstName"
| "lastName"
| "provider"
| "uid"
| "externalAuthProviderId"
| "orgRoleId"
>;
appUserGrants?: Pick<Model.AppUserGrant, "appId" | "appRoleId">[];
} & {
identityHash: string;
encryptionKey: string;
};
export type GeneratedDeviceGrantResult = {
identityHash: string;
encryptionKey: string;
granteeId: string;
createdAt: number;
};
export type GeneratedCliUserResult = {
user: Pick<Model.CliUser, "name" | "orgRoleId">;
appUserGrants?: Pick<Model.AppUserGrant, "appId" | "appRoleId">[];
cliKey: string;
};
export type GeneratedRecoveryKey = {
encryptionKey: string;
};
// Semver-style version string; release notes are keyed by version.
type Version = string;
export type AvailableUpgrade = {
latest: Version;
releaseNotes: Record<Version, string>;
};
// Payload handed to the external (SSO) auth flow when accepting an invite or
// device grant.
export interface ExternalAuthSetupPayload {
authType: "accept_invite" | "accept_device_grant";
emailToken: string;
encryptionToken: string;
externalAuthProviderId: string;
loadActionType:
| Client.ActionType.LOAD_DEVICE_GRANT
| Client.ActionType.LOAD_INVITE;
orgId: string;
}
export type PendingInvite = Pick<
Api.Net.ApiParamTypes["CreateInvite"],
"user" | "appUserGrants" | "userGroupIds" | "scim"
>;
// Messages sent over the core process's local socket: shutdown notice, a
// generic update ping, or an RFC 6902 JSON-patch diff of state.
export type LocalSocketMessage =
| {
type: "closing";
}
| {
type: "update";
}
| {
type: "diffs";
diffs: Patch;
};
// DispatchResult as returned over the local HTTP API: state is replaced by
// JSON-patch diffs plus an HTTP status.
export type CoreDispatchResult = Omit<DispatchResult, "state"> & {
diffs: Patch;
status: number;
};
}
export default Client;
import { SearchConfig } from '@twilio-labs/serverless-api/dist/utils';
import { ServerlessFunctionSignature } from '@twilio-labs/serverless-runtime-types/types';
import type {
LocalDevelopmentServer as LDS,
ServerConfig,
} from '@twilio/runtime-handler/dist/dev-runtime/server';
import bodyParser from 'body-parser';
import chokidar from 'chokidar';
import express, {
Express,
NextFunction,
Request as ExpressRequest,
Response as ExpressResponse,
} from 'express';
import userAgentMiddleware from 'express-useragent';
import debounce from 'lodash.debounce';
import nocache from 'nocache';
import path from 'path';
import { StartCliConfig } from '../config/start';
import { printRouteInfo } from '../printers/start';
import { wrapErrorInHtml } from '../utils/error-html';
import { getDebugFunction, logger, LoggingLevel } from '../utils/logger';
import { writeOutput } from '../utils/output';
import { requireFromProject } from '../utils/requireFromProject';
import { createLogger } from './internal/request-logger';
import { getRouteMap } from './internal/route-cache';
import { getFunctionsAndAssets, RouteInfo } from './internal/runtime-paths';
import {
constructGlobalScope,
functionPathToRoute,
functionToRoute,
} from './route';
// Namespaced debug logger for this module.
const debug = getDebugFunction('twilio-run:server');
// Port used when neither the caller nor the PORT env variable specifies one.
const DEFAULT_PORT = process.env.PORT || 3000;
// Debounce window (ms) for file-watcher-triggered route reloads.
const RELOAD_DEBOUNCE_MS = 250;
// Request-body size limit; 6mb matches the Lambda payload limit the constant
// is named after.
const DEFAULT_BODY_SIZE_LAMBDA = '6mb';
/**
 * Loads a Twilio Function file via `require` and returns its exported
 * `handler`. Because `require` is used, the module cache applies unless it
 * has been cleared (see `requireCacheCleaner`).
 *
 * @param fnPath - absolute path of the Function file to load
 */
function loadTwilioFunction(fnPath: string): ServerlessFunctionSignature {
  const loadedModule = require(fnPath);
  return loadedModule.handler;
}
/**
 * Express middleware that empties Node's `require` cache so that live-reload
 * picks up edited Function files on the next request.
 */
function requireCacheCleaner(
  req: ExpressRequest,
  res: ExpressResponse,
  next: NextFunction
) {
  debug('Deleting require cache');
  for (const cacheKey of Object.keys(require.cache)) {
    // Keep compiled .node binaries (deleting those has unspecified results)
    // and anything belonging to twilio-run itself or its dependencies, since
    // those don't need to be reloaded.
    const isNativeBinary = cacheKey.endsWith('.node');
    const isOwnModule = cacheKey.includes('twilio-run');
    if (!isNativeBinary && !isOwnModule) {
      delete require.cache[cacheKey];
    }
  }
  next();
}
/**
 * Discovers the Functions and Assets of the project, honoring any custom
 * folder names configured by the user.
 *
 * @param config - CLI start configuration holding baseDir and folder overrides
 */
async function findRoutes(config: StartCliConfig): Promise<RouteInfo> {
  const { baseDir, functionsFolderName, assetsFolderName } = config;
  const searchConfig: SearchConfig = {};
  if (functionsFolderName) {
    searchConfig.functionsFolderNames = [functionsFolderName];
  }
  if (assetsFolderName) {
    searchConfig.assetsFolderNames = [assetsFolderName];
  }
  return getFunctionsAndAssets(baseDir, searchConfig);
}
/**
 * Watches the project's functions and assets directories and pushes an
 * updated route map into the running LocalDevelopmentServer whenever files
 * are added or removed. Reloads are debounced so multi-file changes trigger
 * a single reload. The watcher is closed on process exit.
 *
 * @param config - CLI start configuration (baseDir, folder-name overrides)
 * @param server - the local development server to update
 */
function configureWatcher(config: StartCliConfig, server: LDS) {
  const watcher = chokidar.watch(
    [
      path.join(
        config.baseDir,
        config.functionsFolderName
          ? // Bug fix: the custom-folder globs previously ended in a stray
            // ")" (`/**/*)`), so they could never match real paths and the
            // watcher silently did nothing for custom folder names.
            `/(${config.functionsFolderName})/**/*`
          : '/(functions|src)/**/*.js'
      ),
      path.join(
        config.baseDir,
        config.assetsFolderName
          ? `/(${config.assetsFolderName})/**/*`
          : '/(assets|static)/**/*'
      ),
    ],
    {
      ignoreInitial: true,
    }
  );

  const reloadRoutes = async () => {
    const routes = await findRoutes(config);
    server.update(routes);
  };

  // Debounce so we don't needlessly reload when multiple files are changed
  const debouncedReloadRoutes = debounce(reloadRoutes, RELOAD_DEBOUNCE_MS);

  watcher
    .on('add', (path) => {
      debug(`Reloading Routes: add @ ${path}`);
      debouncedReloadRoutes();
    })
    .on('unlink', (path) => {
      debug(`Reloading Routes: unlink @ ${path}`);
      debouncedReloadRoutes();
    });

  // Clean the watcher up when exiting.
  process.on('exit', () => watcher.close());
}
/**
 * Creates the local development server by loading the
 * `@twilio/runtime-handler/dev` implementation from the target project; if
 * that module cannot be loaded, falls back to the deprecated built-in
 * `createServer` implementation below.
 *
 * @param port - port the server will be configured with
 * @param config - CLI start configuration
 * @returns the underlying Express application
 */
export async function createLocalDevelopmentServer(
port: string | number = DEFAULT_PORT,
config: StartCliConfig
): Promise<Express> {
try {
const { LocalDevelopmentServer } = requireFromProject(
config.baseDir,
'@twilio/runtime-handler/dev'
) as { LocalDevelopmentServer: LDS };
const routes = await findRoutes(config);
// NOTE(review): `port` is passed both as the constructor argument and as
// `config.port` in the options below — confirm which one the handler uses.
const server = new LocalDevelopmentServer(port, {
inspect: config.inspect,
baseDir: config.baseDir,
env: config.env,
port: config.port,
url: config.url,
detailedLogs: config.detailedLogs,
live: config.live,
logs: config.logs,
legacyMode: config.legacyMode,
appName: config.appName,
forkProcess: config.forkProcess,
logger: logger,
routes: routes,
enableDebugLogs: logger.config.level === LoggingLevel.debug,
});
server.on('request-log', (logMessage: string) => {
writeOutput(logMessage);
});
server.on('updated-routes', async (config: ServerConfig) => {
await printRouteInfo(config);
});
configureWatcher(config, server);
return server.getApp();
} catch (err) {
// NOTE(review): `err` is swallowed without being logged; only a generic
// debug message is emitted, which makes genuine load failures hard to
// diagnose. Consider logging `err` here.
debug(
'Failed to load server from @twilio/runtime-handler/dev. Falling back to built-in.'
);
return createServer(port, config);
}
}
/**
 * @deprecated Use {@link createLocalDevelopmentServer}; this built-in server
 * is only the fallback when `@twilio/runtime-handler/dev` cannot be loaded.
 *
 * Creates an Express app that emulates the Twilio Functions runtime locally:
 * parses request bodies up to the Lambda size limit, optionally logs
 * requests, supports live-reload (no-cache + require-cache clearing +
 * file watching), legacy-mode asset paths, CORS preflight handling, and
 * routing to Function files and Assets.
 *
 * @param port - port used to build the default local URL
 * @param config - CLI start configuration
 */
export async function createServer(
  port: string | number = DEFAULT_PORT,
  config: StartCliConfig
): Promise<Express> {
  // Fill in defaults without mutating the caller's config object.
  config = {
    ...config,
    url: config.url || `http://localhost:${port}`,
    baseDir: config.baseDir || process.cwd(),
  };

  debug('Starting server with config: %p', config);

  const app = express();
  app.use(userAgentMiddleware.express());
  app.use(
    bodyParser.urlencoded({ extended: false, limit: DEFAULT_BODY_SIZE_LAMBDA })
  );
  app.use(bodyParser.json({ limit: DEFAULT_BODY_SIZE_LAMBDA }));
  app.get('/favicon.ico', (req, res) => {
    res.redirect(
      'https://www.twilio.com/marketing/bundles/marketing/img/favicons/favicon.ico'
    );
  });

  if (config.logs) {
    app.use(createLogger(config));
  }

  if (config.live) {
    app.use(nocache());
    app.use(requireCacheCleaner);
  }

  if (config.legacyMode) {
    // Bug fix: this was `config.legacyMode ? 'true' : undefined`, but inside
    // this guard legacyMode is always truthy, and assigning `undefined` to a
    // process.env entry stores the string "undefined" anyway.
    process.env.TWILIO_FUNCTIONS_LEGACY_MODE = 'true';
    debug('Legacy mode enabled');
    app.use('/assets/*', (req, res, next) => {
      // NOTE(review): Express exposes `req.path` via a prototype getter, so
      // this assignment may not take effect — confirm whether `req.url`
      // should be rewritten instead.
      req.path = req.path.replace('/assets/', '/');
      next();
    });
  }

  let routeMap = await getRouteMap(config);

  if (config.live) {
    const watcher = chokidar.watch(
      [
        path.join(config.baseDir, '/(functions|src)/**/*.js'),
        path.join(config.baseDir, '/(assets|static)/**/*'),
      ],
      {
        ignoreInitial: true,
      }
    );

    const reloadRoutes = async () => {
      routeMap = await getRouteMap(config);
      await printRouteInfo(config);
    };

    // Debounce so we don't needlessly reload when multiple files are changed
    const debouncedReloadRoutes = debounce(reloadRoutes, RELOAD_DEBOUNCE_MS);

    watcher
      .on('add', (path) => {
        debug(`Reloading Routes: add @ ${path}`);
        debouncedReloadRoutes();
      })
      .on('unlink', (path) => {
        debug(`Reloading Routes: unlink @ ${path}`);
        debouncedReloadRoutes();
      });

    // Clean the watcher up when exiting.
    process.on('exit', () => watcher.close());
  }

  constructGlobalScope(config);
  app.set('port', port);
  app.all(
    '/*',
    (req: ExpressRequest, res: ExpressResponse, next: NextFunction) => {
      let routeInfo = routeMap.get(req.path);

      if (!routeInfo && req.path === '/') {
        // In production we automatically fall back to the contents of /assets/index.html
        debug('Falling back to /assets/index.html');
        routeInfo = routeMap.get('/assets/index.html');
      }

      // Answer CORS preflight requests for any known route without invoking it.
      if (req.method === 'OPTIONS' && routeInfo) {
        res.set({
          'access-control-allow-origin': '*',
          'access-control-allow-headers':
            'Accept, Authorization, Content-Type, If-Match, If-Modified-Since, If-None-Match, If-Unmodified-Since, User-Agent',
          'access-control-allow-methods': 'GET, POST, OPTIONS',
          'access-control-expose-headers': 'ETag',
          'access-control-max-age': '86400',
          'access-control-allow-credentials': true,
          'content-type': 'text/plain; charset=UTF-8',
          'content-length': '0',
        });
        res.status(204).end();
        return;
      }

      if (routeInfo && routeInfo.type === 'function') {
        const functionPath = routeInfo.filePath;
        try {
          if (!functionPath) {
            throw new Error('Missing function path');
          }

          if (config.forkProcess) {
            // Run the Function in a child process for isolation.
            functionPathToRoute(functionPath, config)(req, res, next);
          } else {
            debug('Load & route to function at "%s"', functionPath);
            const twilioFunction = loadTwilioFunction(functionPath);
            if (typeof twilioFunction !== 'function') {
              return res
                .status(404)
                .send(
                  `Could not find a "handler" function in file ${functionPath}`
                );
            }
            functionToRoute(twilioFunction, config, functionPath)(
              req,
              res,
              next
            );
          }
        } catch (err) {
          debug('Failed to retrieve function. %O', err);
          if (err.code === 'ENOENT') {
            res.status(404).send(`Could not find function ${functionPath}`);
          } else {
            // Render the error as an HTML page for the browser.
            res.status(500).send(wrapErrorInHtml(err, functionPath));
          }
        }
      } else if (routeInfo && routeInfo.type === 'asset') {
        if (routeInfo.filePath) {
          if (routeInfo.access === 'private') {
            res.status(403).send('This asset has been marked as private');
          } else {
            res.sendFile(routeInfo.filePath);
          }
        } else {
          res.status(404).send('Could not find asset');
        }
      } else {
        res.status(404).send('Could not find requested resource');
      }
    }
  );
  return app;
}
export async function runServer(
port: number | string = DEFAULT_PORT,
config: StartCliConfig
): Promise<Express> {
const app = await createServer(port, config);
return new Promise((resolve) => {
app.listen(port);
resolve(app);
});
} | the_stack |
import {
arrayWith,
countResources,
expect as expectCDK,
haveResourceLike,
ResourcePart,
stringLike,
} from '@aws-cdk/assert';
import { CfnLaunchConfiguration } from '@aws-cdk/aws-autoscaling';
import {
AmazonLinuxImage,
ExecuteFileOptions,
InstanceType,
MachineImage,
SecurityGroup,
SubnetType,
Vpc,
} from '@aws-cdk/aws-ec2';
import * as iam from '@aws-cdk/aws-iam';
import { ILogGroup } from '@aws-cdk/aws-logs';
import { Bucket } from '@aws-cdk/aws-s3';
import { Asset } from '@aws-cdk/aws-s3-assets';
import { StringParameter } from '@aws-cdk/aws-ssm';
import * as cdk from '@aws-cdk/core';
import {
DeploymentInstance,
DeploymentInstanceProps,
} from '../lib/deployment-instance';
import { resourceTagMatcher, testConstructTags } from './tag-helpers';
// Construct id used when instantiating the DeploymentInstance under test.
const DEFAULT_CONSTRUCT_ID = 'DeploymentInstance';
/**
 * Machine image that spies on the following user data methods:
 *
 * * `.addOnExitCommands`
 * * `.addExecuteFileCommand`
 */
class AmazonLinuxWithUserDataSpy extends AmazonLinuxImage {
  public getImage(scope: cdk.Construct) {
    const image = super.getImage(scope);
    // Instrument the user data so tests can assert on how it is composed.
    jest.spyOn(image.userData, 'addOnExitCommands');
    jest.spyOn(image.userData, 'addExecuteFileCommand');
    return image;
  }
}
describe('DeploymentInstance', () => {
// Shared fixtures for all DeploymentInstance tests; the VPC lives in its own
// dependency stack so each test stack can reference it.
let app: cdk.App;
let depStack: cdk.Stack;
let stack: cdk.Stack;
let vpc: Vpc;
let target: DeploymentInstance;
beforeAll(() => {
// GIVEN
app = new cdk.App();
depStack = new cdk.Stack(app, 'DepStack');
vpc = new Vpc(depStack, 'VPC');
});
describe('defaults', () => {
// Synthesize one DeploymentInstance with all-default props; the tests below
// assert on the resulting CloudFormation template.
beforeAll(() => {
// GIVEN
stack = new cdk.Stack(app, 'DefaultsStack');
target = new DeploymentInstance(stack, DEFAULT_CONSTRUCT_ID, {
vpc,
});
});
// Assertions on the single Auto-Scaling Group synthesized by default.
describe('Auto-Scaling Group', () => {
// Only one ASG is deployed. This is an anchor for the tests that follow. Each test is independent and not
// guaranteed to match on the same resource in the CloudFormation template. Having a test that asserts a single
// ASG makes these assertions linked
test('deploys a single Auto-Scaling Group', () => {
// THEN
expectCDK(stack).to(countResources('AWS::AutoScaling::AutoScalingGroup', 1));
});
// Min == Max == 1 pins the group to exactly one instance.
test('MaxSize is 1', () => {
// THEN
expectCDK(stack).to(haveResourceLike('AWS::AutoScaling::AutoScalingGroup', {
MaxSize: '1',
}));
});
test('MinSize is 1', () => {
// THEN
expectCDK(stack).to(haveResourceLike('AWS::AutoScaling::AutoScalingGroup', {
MinSize: '1',
}));
});
test('uses private subnets', () => {
// GIVEN
const privateSubnetIDs = vpc.selectSubnets({ subnetType: SubnetType.PRIVATE }).subnetIds;
// THEN
expectCDK(stack).to(haveResourceLike('AWS::AutoScaling::AutoScalingGroup', {
VPCZoneIdentifier: arrayWith(
...stack.resolve(privateSubnetIDs),
),
}));
});
test('waits 15 minutes for one signal', () => {
// THEN
expectCDK(stack).to(haveResourceLike(
'AWS::AutoScaling::AutoScalingGroup',
{
CreationPolicy: {
ResourceSignal: {
Count: 1,
Timeout: 'PT15M',
},
},
},
ResourcePart.CompleteDefinition,
));
});
test('sets replacing update policy', () => {
// THEN
expectCDK(stack).to(haveResourceLike(
'AWS::AutoScaling::AutoScalingGroup',
{
UpdatePolicy: {
AutoScalingReplacingUpdate: {
WillReplace: true,
},
AutoScalingScheduledAction: {
IgnoreUnmodifiedGroupSizeProperties: true,
},
},
},
ResourcePart.CompleteDefinition,
));
});
test('uses Launch Configuration', () => {
// GIVEN
// Reach into the construct tree to get the L1 launch configuration.
const launchConfig = target.node.findChild('ASG').node.findChild('LaunchConfig') as CfnLaunchConfiguration;
// THEN
expectCDK(stack).to(haveResourceLike('AWS::AutoScaling::AutoScalingGroup', {
LaunchConfigurationName: stack.resolve(launchConfig.ref),
}));
});
});
describe('Launch Configuration', () => {
// Only one ASG is deployed. This is an anchor for the tests that follow. Each test is independent and not
// guaranteed to match on the same resource in the CloudFormation template. Having a test that asserts a single
// ASG makes these assertions linked
test('deploys a single Launch Configuration', () => {
// THEN
expectCDK(stack).to(countResources('AWS::AutoScaling::LaunchConfiguration', 1));
});
test('uses latest Amazon Linux machine image', () => {
// GIVEN
const amazonLinux = MachineImage.latestAmazonLinux();
const imageId: { Ref: string } = stack.resolve(amazonLinux.getImage(stack)).imageId;
// THEN
expectCDK(stack).to(haveResourceLike('AWS::AutoScaling::LaunchConfiguration', {
ImageId: imageId,
}));
});
test('uses t3.small', () => {
// THEN
expectCDK(stack).to(haveResourceLike('AWS::AutoScaling::LaunchConfiguration', {
InstanceType: 't3.small',
}));
});
test('Uses created Security Group', () => {
// GIVEN
const securityGroup = (target
.node.findChild('ASG')
.node.findChild('InstanceSecurityGroup')
) as SecurityGroup;
// THEN
expectCDK(stack).to(haveResourceLike('AWS::AutoScaling::LaunchConfiguration', {
SecurityGroups: [
stack.resolve(securityGroup.securityGroupId),
],
}));
});
test('depends on policy', () => {
// GIVEN
const policy = (
target
.node.findChild('ASG')
.node.findChild('InstanceRole')
.node.findChild('DefaultPolicy')
.node.defaultChild
) as iam.CfnPolicy;
// THEN
expectCDK(stack).to(haveResourceLike(
'AWS::AutoScaling::LaunchConfiguration',
{
DependsOn: arrayWith(
stack.resolve(policy.logicalId),
),
},
ResourcePart.CompleteDefinition,
));
});
});
describe('Security Group', () => {
test('creates Security Group in the desired VPC', () => {
// THEN
expectCDK(stack).to(countResources('AWS::EC2::SecurityGroup', 1));
expectCDK(stack).to(haveResourceLike('AWS::EC2::SecurityGroup', {
VpcId: stack.resolve(vpc.vpcId),
}));
});
});
describe('ASG IAM role', () => {
let instanceRole: iam.CfnRole;
beforeAll(() => {
// GIVEN
instanceRole = (
target
.node.findChild('ASG')
.node.findChild('InstanceRole')
.node.defaultChild
) as iam.CfnRole;
});
test('creates an instance profile', () => {
// THEN
expectCDK(stack).to(haveResourceLike('AWS::IAM::InstanceProfile', {
Roles: [
{ Ref: stack.getLogicalId(instanceRole) },
],
}));
});
test('creates a role that can be assumed by EC2', () => {
// GIVEN
const servicePrincipal = new iam.ServicePrincipal('ec2.amazonaws.com');
// THEN
expectCDK(stack).to(haveResourceLike('AWS::IAM::Role', {
AssumeRolePolicyDocument: {
Statement: [
{
Action: 'sts:AssumeRole',
Effect: 'Allow',
Principal: {
Service: stack.resolve(servicePrincipal.policyFragment.principalJson).Service[0],
},
},
],
},
}));
});
test('can signal to CloudFormation', () => {
// THEN
expectCDK(stack).to(haveResourceLike('AWS::IAM::Policy', {
PolicyDocument: {
Statement: arrayWith(
{
Action: 'cloudformation:SignalResource',
Effect: 'Allow',
Resource: { Ref: 'AWS::StackId' },
},
),
},
Roles: [
stack.resolve(instanceRole.ref),
],
}));
});
test('can write to the log group', () => {
// GIVEN
const logGroup = target.node.findChild(`${DEFAULT_CONSTRUCT_ID}LogGroup`) as ILogGroup;
// THEN
expectCDK(stack).to(haveResourceLike('AWS::IAM::Policy', {
PolicyDocument: {
Statement: arrayWith(
{
Action: [
'logs:CreateLogStream',
'logs:PutLogEvents',
],
Effect: 'Allow',
Resource: stack.resolve(logGroup.logGroupArn),
},
),
},
Roles: [
stack.resolve(instanceRole.ref),
],
}));
});
test('can fetch the CloudWatch Agent install script', () => {
// GIVEN
const cloudWatchAgentScriptAsset = (
target
.node.findChild('CloudWatchConfigurationScriptAsset')
) as Asset;
// THEN
expectCDK(stack).to(haveResourceLike('AWS::IAM::Policy', {
PolicyDocument: {
Statement: arrayWith(
{
Action: [
's3:GetObject*',
's3:GetBucket*',
's3:List*',
],
Effect: 'Allow',
Resource: stack.resolve([
cloudWatchAgentScriptAsset.bucket.bucketArn,
cloudWatchAgentScriptAsset.bucket.arnForObjects('*'),
]),
},
),
},
Roles: [
stack.resolve(instanceRole.ref),
],
}));
});
test('can fetch the CloudWatch Agent configuration file SSM parameter', () => {
// GIVEN
const cloudWatchConfigSsmParam = (
target
.node.findChild('StringParameter')
) as StringParameter;
// THEN
expectCDK(stack).to(haveResourceLike('AWS::IAM::Policy', {
PolicyDocument: {
Statement: arrayWith(
{
Action: [
'ssm:DescribeParameters',
'ssm:GetParameters',
'ssm:GetParameter',
'ssm:GetParameterHistory',
],
Effect: 'Allow',
Resource: stack.resolve(cloudWatchConfigSsmParam.parameterArn),
},
),
},
Roles: [
stack.resolve(instanceRole.ref),
],
}));
});
test('can fetch the CloudWatch Agent installer from S3', () => {
// GIVEN
const cloudWatchAgentInstallerBucket = Bucket.fromBucketArn(depStack, 'CloudWatchAgentInstallerBucket', `arn:aws:s3:::amazoncloudwatch-agent-${stack.region}` );
// THEN
expectCDK(stack).to(haveResourceLike('AWS::IAM::Policy', {
PolicyDocument: {
Statement: arrayWith(
{
Action: [
's3:GetObject*',
's3:GetBucket*',
's3:List*',
],
Effect: 'Allow',
Resource: stack.resolve([
cloudWatchAgentInstallerBucket.bucketArn,
cloudWatchAgentInstallerBucket.arnForObjects('*'),
]),
},
),
},
Roles: [
stack.resolve(instanceRole.ref),
],
}));
});
test('can fetch GPG installer from RFDK dependencies S3 bucket', () => {
// GIVEN
const rfdkExternalDepsBucket = Bucket.fromBucketArn(depStack, 'RfdkExternalDependenciesBucket', `arn:aws:s3:::rfdk-external-dependencies-${stack.region}` );
// THEN
expectCDK(stack).to(haveResourceLike('AWS::IAM::Policy', {
PolicyDocument: {
Statement: arrayWith(
{
Action: [
's3:GetObject*',
's3:GetBucket*',
's3:List*',
],
Effect: 'Allow',
Resource: stack.resolve([
rfdkExternalDepsBucket.bucketArn,
rfdkExternalDepsBucket.arnForObjects('*'),
]),
},
),
},
Roles: [
stack.resolve(instanceRole.ref),
],
}));
});
test('can scale the Auto-Scaling Group', () => {
// THEN
expectCDK(stack).to(haveResourceLike('AWS::IAM::Policy', {
PolicyDocument: {
Statement: arrayWith(
{
Action: 'autoscaling:UpdateAutoScalingGroup',
Condition: {
// This tag is added by RFDK to scope down the permissions of the policy for least-privilege
StringEquals: { 'autoscaling:ResourceTag/resourceLogicalId': cdk.Names.uniqueId(target) },
},
Effect: 'Allow',
Resource: '*',
},
// The instance determines its Auto-Scaling Group by reading the tag created on the instance by the EC2
// Auto-Scaling service
{
Action: 'ec2:DescribeTags',
Effect: 'Allow',
Resource: '*',
},
),
},
Roles: [
stack.resolve(instanceRole.ref),
],
}));
});
});
describe('CloudWatch Agent config SSM parameter', () => {
test('configures log group', () => {
// GIVEN
const logGroup = target.node.findChild(`${DEFAULT_CONSTRUCT_ID}LogGroup`) as ILogGroup;
// THEN
expectCDK(stack).to(haveResourceLike('AWS::SSM::Parameter', {
Type: 'String',
Value: {
'Fn::Join': [
'',
arrayWith(
'{"logs":{"logs_collected":{"files":{"collect_list":[{"log_group_name":"',
stack.resolve(logGroup.logGroupName),
),
],
},
}));
});
test('configures cloud-init log', () => {
// THEN
expectCDK(stack).to(haveResourceLike('AWS::SSM::Parameter', {
Type: 'String',
Value: {
'Fn::Join': [
'',
arrayWith(
stringLike('*"log_stream_name":"cloud-init-output-{instance_id}","file_path":"/var/log/cloud-init-output.log",*'),
),
],
},
}));
});
});
describe('Tags resources with RFDK meta-data', () => {
testConstructTags({
constructName: 'DeploymentInstance',
createConstruct: () => {
return stack;
},
resourceTypeCounts: {
'AWS::EC2::SecurityGroup': 1,
'AWS::IAM::Role': 1,
'AWS::AutoScaling::AutoScalingGroup': 1,
'AWS::SSM::Parameter': 1,
},
});
});
// RFDK adds the resourceLogicalId tag to the Auto-Scaling Group in order to scope down the permissions of the
// IAM policy given to the EC2 instance profile so that only that ASG can be scaled by the instance.
test('Tagging for self-termination', () => {
// THEN
const matcher = resourceTagMatcher('AWS::AutoScaling::AutoScalingGroup', 'resourceLogicalId', cdk.Names.uniqueId(target));
// THEN
expectCDK(stack).to(haveResourceLike('AWS::AutoScaling::AutoScalingGroup', matcher));
});
});
// Assertions on the EC2 user data generated by DeploymentInstance, captured via
// the jest spies installed by AmazonLinuxWithUserDataSpy.
describe('User Data', () => {
beforeAll(() => {
// GIVEN
stack = new cdk.Stack(app, 'UserDataStack');
// WHEN
target = new DeploymentInstance(stack, 'DeploymentInstanceNew', {
vpc,
// a hack to be able to spy on the user data's "addOnExitCommand" and "addExecuteFileCommand" methods.
machineImage: new AmazonLinuxWithUserDataSpy(),
});
});
test('configures self-termination', () =>{
// THEN
// The on-exit commands resolve the instance's own ASG via IMDSv2 + describe-tags, then scale it to zero
expect(target.userData.addOnExitCommands).toHaveBeenCalledWith(
'TOKEN=$(curl -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30" 2> /dev/null)',
'INSTANCE="$(curl -s -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/instance-id 2> /dev/null)"',
`ASG="$(aws --region ${stack.region} ec2 describe-tags --filters "Name=resource-id,Values=\${INSTANCE}" "Name=key,Values=aws:autoscaling:groupName" --query "Tags[0].Value" --output text)"`,
`aws --region ${stack.region} autoscaling update-auto-scaling-group --auto-scaling-group-name \${ASG} --min-size 0 --max-size 0 --desired-capacity 0`,
);
});
test('configures CloudWatch Agent', () =>{
// GIVEN
const spy = target.userData.addExecuteFileCommand as jest.Mock<void, [ExecuteFileOptions]>;
const cloudWatchConfigSsmParam = (
target
.node.findChild('StringParameter')
) as StringParameter;
// THEN
// Should have been called
expect(spy.mock.calls.length).toBeGreaterThanOrEqual(1);
// The first call...
const executeFileOptions = spy.mock.calls[0][0];
// Should have been called with arguments
const args = executeFileOptions.arguments;
expect(args).not.toBeUndefined();
const splitArgs = args!.split(' ');
// Should have three arguments
expect(splitArgs).toHaveLength(3);
// Specify the flag to install the CloudWatch Agent
expect(splitArgs[0]).toEqual('-i');
// Should pass the region
expect(stack.resolve(splitArgs[1])).toEqual(stack.resolve(stack.region));
// Should pass the SSM parameter containing the CloudWatch Agent configuration
expect(stack.resolve(splitArgs[2])).toEqual(stack.resolve(cloudWatchConfigSsmParam.parameterName));
});
});
// Verifies the final log group name produced for each combination of the
// optional logGroupName / logGroupPrefix props.
describe('Custom::LogRetention.LogGroupName', () => {
beforeEach(() => {
// We need a clean construct tree, because the tests use the same construct ID
app = new cdk.App();
depStack = new cdk.Stack(app, 'DepStack');
vpc = new Vpc(depStack, 'VPC');
stack = new cdk.Stack(app, 'Stack');
});
// GIVEN
test.each<[
{
// optional logging props of DeploymentInstance
logGroupName?: string,
logGroupPrefix?: string,
},
// expected final log group name
string,
]>([
[
{},
// defaults expected final log group name
`/renderfarm/${DEFAULT_CONSTRUCT_ID}`,
],
[
{ logGroupName: 'foo' },
// expected final log group name
'/renderfarm/foo',
],
[
{
logGroupPrefix: 'logGroupPrefix',
},
// expected final log group name
`logGroupPrefix${DEFAULT_CONSTRUCT_ID}`,
],
[
{
logGroupName: 'logGroupName',
logGroupPrefix: 'logGroupPrefix',
},
// expected final log group name
'logGroupPrefixlogGroupName',
],
])('%s => %s', ({ logGroupName, logGroupPrefix }, expectedLogGroupName) => {
// WHEN
new DeploymentInstance(stack, DEFAULT_CONSTRUCT_ID, {
vpc,
logGroupName,
logGroupProps: logGroupPrefix ? { logGroupPrefix } : undefined,
});
// THEN
expectCDK(stack).to(haveResourceLike('Custom::LogRetention', {
LogGroupName: expectedLogGroupName,
}));
});
});
test('uses specified instance type', () => {
  // GIVEN
  // An isolated stack, so the assertion only sees resources synthesized by this
  // particular DeploymentInstance
  stack = new cdk.Stack(app, 'InstanceTypeStack');
  const instanceType = new InstanceType('c5.large');
  // WHEN
  new DeploymentInstance(stack, DEFAULT_CONSTRUCT_ID, { vpc, instanceType });
  // THEN
  expectCDK(stack).to(haveResourceLike('AWS::AutoScaling::LaunchConfiguration', {
    InstanceType: instanceType.toString(),
  }));
});
// When self-termination is disabled, the user data commands, the IAM scaling
// permissions, and the ASG tag used for self-termination must all be absent.
describe('.selfTermination = false', () => {
beforeAll(() => {
// GIVEN
stack = new cdk.Stack(app, 'SelfTerminationDisabledStack');
// Spy on user data method calls
const machineImage = new AmazonLinuxWithUserDataSpy();
const deploymentInstanceProps: DeploymentInstanceProps = {
vpc,
selfTerminate: false,
machineImage,
};
// WHEN
target = new DeploymentInstance(stack, DEFAULT_CONSTRUCT_ID, deploymentInstanceProps);
});
test('does not add on-exit commands', () => {
// THEN
expect(target.userData.addOnExitCommands).not.toHaveBeenCalledWith(expect.arrayContaining([
expect.stringMatching(/\baws\s+.*\bautoscaling\s+update-auto-scaling-group/),
]));
});
test('is not granted IAM permissions to scale the Auto-Scaling Group', () => {
// GIVEN
const instanceRole = (
target
.node.findChild('ASG')
.node.findChild('InstanceRole')
.node.defaultChild
) as iam.CfnRole;
// THEN
expectCDK(stack).notTo(haveResourceLike('AWS::IAM::Policy', {
PolicyDocument: {
Statement: arrayWith(
{
Action: 'autoscaling:UpdateAutoScalingGroup',
Condition: {
// This tag is added by RFDK to scope down the permissions of the policy for least-privilege
StringEquals: { 'autoscaling:ResourceTag/resourceLogicalId': cdk.Names.uniqueId(target) },
},
Effect: 'Allow',
Resource: '*',
},
// The instance determines its Auto-Scaling Group by reading the tag created on the instance by the EC2
// Auto-Scaling service
{
Action: 'ec2:DescribeTags',
Effect: 'Allow',
Resource: '*',
},
),
},
Roles: [
stack.resolve(instanceRole.ref),
],
}));
});
test('does not tag for self-termination', () => {
// GIVEN
const matcher = resourceTagMatcher('AWS::AutoScaling::AutoScalingGroup', 'resourceLogicalId', cdk.Names.uniqueId(target));
// THEN
expectCDK(stack).notTo(haveResourceLike('AWS::AutoScaling::AutoScalingGroup', matcher));
});
});
// When executionTimeout is specified, the ASG creation policy must use it as the
// resource signal timeout instead of the 15-minute default.
describe('.executionTimeout is specified', () => {
const executionTimeout = cdk.Duration.minutes(30);
beforeAll(() => {
// GIVEN
// Use a clean stack to not pollute other stacks with resources
stack = new cdk.Stack(app, 'ExecutionTimeout');
const deploymentInstanceProps: DeploymentInstanceProps = {
vpc,
executionTimeout,
};
// WHEN
new DeploymentInstance(stack, DEFAULT_CONSTRUCT_ID, deploymentInstanceProps);
});
// THEN
test('AWS::AutoScaling::AutoScalingGroup creation policy signal timeout is set accordingly', () => {
expectCDK(stack).to(haveResourceLike(
'AWS::AutoScaling::AutoScalingGroup',
{
CreationPolicy: {
ResourceSignal: {
Count: 1,
Timeout: executionTimeout.toIsoString(),
},
},
},
ResourcePart.CompleteDefinition,
));
});
});
});
import { yamlConfig, JWT_SECRET } from "@config/app"
import * as jwt from "jsonwebtoken"
import { RateLimitConfig } from "@domain/rate-limit"
import {
UserLoginIpRateLimiterExceededError,
UserLoginPhoneRateLimiterExceededError,
UserPhoneCodeAttemptIpRateLimiterExceededError,
UserPhoneCodeAttemptPhoneMinIntervalRateLimiterExceededError,
UserPhoneCodeAttemptPhoneRateLimiterExceededError,
} from "@domain/rate-limit/errors"
import { ApolloClient, NormalizedCacheObject } from "@apollo/client/core"
import USER_REQUEST_AUTH_CODE from "./mutations/user-request-auth-code.gql"
import USER_LOGIN from "./mutations/user-login.gql"
import MAIN from "./queries/main.gql"
import {
clearAccountLocks,
clearLimiters,
resetUserPhoneCodeAttemptIp,
resetUserPhoneCodeAttemptPhone,
resetUserLoginIpRateLimits,
resetUserLoginPhoneRateLimits,
resetUserPhoneCodeAttemptPhoneMinIntervalLimits,
startServer,
killServer,
createApolloClient,
defaultTestClientConfig,
PID,
} from "test/helpers"
// Shared test state, initialized in `beforeAll` below.
let correctCode: PhoneCode,
apolloClient: ApolloClient<NormalizedCacheObject>,
disposeClient: () => void,
serverPid: PID
// Dedicated test account used by every test in this file.
const { phone, code } = yamlConfig.test_accounts[9]
// Boot the GraphQL server once and connect a shared Apollo client.
beforeAll(async () => {
// The configured code may not be a string; coerce it to the branded PhoneCode type
correctCode = `${code}` as PhoneCode
serverPid = await startServer()
// Parenthesized destructuring assignment into the module-level bindings
;({ apolloClient, disposeClient } = createApolloClient(defaultTestClientConfig()))
})
// Start every test with fresh rate limiters and unlocked accounts.
beforeEach(async () => {
await clearLimiters()
await clearAccountLocks()
})
// Tear down in reverse order of setup: dispose the client, then kill the server.
afterAll(async () => {
disposeClient()
await killServer(serverPid)
})
describe("graphql", () => {
describe("main query", () => {
it("returns valid data", async () => {
const { data } = await apolloClient.query({
query: MAIN,
variables: { hasToken: false },
})
expect(data.globals).toBeTruthy()
expect(data.mobileVersions).toBeTruthy()
expect(data.quizQuestions).toBeTruthy()
expect(data.globals.nodesIds).toEqual(expect.arrayContaining([expect.any(String)]))
expect(data.mobileVersions).toEqual(
expect.arrayContaining([
expect.objectContaining({
currentSupported: expect.any(Number),
minSupported: expect.any(Number),
platform: expect.any(String),
}),
]),
)
expect(data.quizQuestions).toEqual(
expect.arrayContaining([
expect.objectContaining({
id: expect.any(String),
earnAmount: expect.any(Number),
}),
]),
)
})
})
describe("userRequestAuthCode", () => {
const mutation = USER_REQUEST_AUTH_CODE
it("success with a valid phone", async () => {
const input = { phone }
const result = await apolloClient.mutate({ mutation, variables: { input } })
expect(result.data.userRequestAuthCode).toEqual(
expect.objectContaining({ success: true }),
)
})
it("returns error for invalid phone", async () => {
const message = "Invalid value for Phone"
let input = { phone: "+123" }
let result = await apolloClient.mutate({ mutation, variables: { input } })
expect(result.data.userRequestAuthCode.errors).toEqual(
expect.arrayContaining([expect.objectContaining({ message })]),
)
input = { phone: "abcd" }
result = await apolloClient.mutate({ mutation, variables: { input } })
expect(result.data.userRequestAuthCode.errors).toEqual(
expect.arrayContaining([expect.objectContaining({ message })]),
)
input = { phone: "" }
result = await apolloClient.mutate({ mutation, variables: { input } })
expect(result.data.userRequestAuthCode.errors).toEqual(
expect.arrayContaining([expect.objectContaining({ message })]),
)
})
it("rate limits too many phone requests", async () => {
await testPhoneCodeAttemptPerPhoneMinInterval(mutation)
await testPhoneCodeAttemptPerPhone(mutation)
await testPhoneCodeAttemptPerIp(mutation)
})
})
describe("userLogin", () => {
const mutation = USER_LOGIN
it("returns a jwt token for a valid phone/code", async () => {
const input = { phone, code: correctCode }
const result = await apolloClient.mutate({ mutation, variables: { input } })
expect(result.data.userLogin).toHaveProperty("authToken")
const token = jwt.verify(result.data.userLogin.authToken, `${JWT_SECRET}`)
expect(token).toHaveProperty("uid")
expect(token).toHaveProperty("network")
expect(token).toHaveProperty("iat")
})
it("returns error for invalid phone", async () => {
let phone = "+19999999999"
let message = "Invalid or incorrect phone code entered."
let input = { phone, code: correctCode }
let result = await apolloClient.mutate({ mutation, variables: { input } })
expect(result.data.userLogin.errors).toEqual(
expect.arrayContaining([expect.objectContaining({ message })]),
)
phone = "+1999"
message = "Invalid value for Phone"
input = { phone, code: correctCode }
result = await apolloClient.mutate({ mutation, variables: { input } })
expect(result.data.userLogin.errors).toEqual(
expect.arrayContaining([expect.objectContaining({ message })]),
)
phone = "abcd"
input = { phone, code: correctCode }
result = await apolloClient.mutate({ mutation, variables: { input } })
expect(result.data.userLogin.errors).toEqual(
expect.arrayContaining([expect.objectContaining({ message })]),
)
phone = ""
input = { phone, code: correctCode }
result = await apolloClient.mutate({ mutation, variables: { input } })
expect(result.data.userLogin.errors).toEqual(
expect.arrayContaining([expect.objectContaining({ message })]),
)
})
it("returns error for invalid code", async () => {
let message = "Invalid or incorrect phone code entered."
let input = { phone, code: "113566" }
let result = await apolloClient.mutate({ mutation, variables: { input } })
expect(result.data.userLogin.errors).toEqual(
expect.arrayContaining([expect.objectContaining({ message })]),
)
message = "Invalid value for OneTimeAuthCode"
input = { phone, code: "abcdef" }
result = await apolloClient.mutate({ mutation, variables: { input } })
expect(result.data.userLogin.errors).toEqual(
expect.arrayContaining([expect.objectContaining({ message })]),
)
input = { phone, code: "" }
result = await apolloClient.mutate({ mutation, variables: { input } })
expect(result.data.userLogin.errors).toEqual(
expect.arrayContaining([expect.objectContaining({ message })]),
)
})
it("rate limits too many invalid login requests by IP, wrong code", async () => {
const args = {
input: { phone, code: "000000" as PhoneCode },
expectedMessage: "Invalid or incorrect phone code entered.",
mutation,
}
await testRateLimitLoginByPhone(args)
await testRateLimitLoginByIp(args)
})
it.skip("rate limits too many invalid login requests by IP, invalid code", async () => {
const args = {
input: { phone, code: "<invalid>" as PhoneCode },
expectedMessage: "Invalid value for OneTimeAuthCode",
mutation,
}
await testRateLimitLoginByPhone(args)
await testRateLimitLoginByIp(args)
})
it("rate limits too many invalid login requests by IP, wrong phone", async () => {
const args = {
input: { phone: "+19999999999" as PhoneNumber, code: correctCode },
expectedMessage: "Invalid or incorrect phone code entered.",
mutation,
}
await testRateLimitLoginByPhone(args)
await testRateLimitLoginByIp(args)
})
it.skip("rate limits too many invalid login requests by IP, invalid phone", async () => {
const args = {
input: { phone: "<invalid>" as PhoneNumber, code: correctCode },
expectedMessage: "Invalid value for Phone",
mutation,
}
await testRateLimitLoginByPhone(args)
await testRateLimitLoginByIp(args)
})
})
})
// Exhausts the per-phone minimum-interval limiter for auth-code requests, then
// verifies the next request is rejected with the limiter's error.
const testPhoneCodeAttemptPerPhoneMinInterval = async (mutation) => {
  // Limiter configuration under test
  const {
    limits: { points },
    error,
  } = RateLimitConfig.requestPhoneCodeAttemptPerPhoneMinInterval

  // Start from a clean slate for the limiter under test
  const initialReset = await resetUserPhoneCodeAttemptPhoneMinIntervalLimits(phone)
  expect(initialReset).not.toBeInstanceOf(Error)
  if (initialReset instanceof Error) return initialReset

  const variables = { input: { phone } }

  // Use up every allowed request. The sibling limiters (per phone, per IP) are
  // reset on each iteration so only the min-interval limiter can trip.
  for (let attempt = 0; attempt < points; attempt++) {
    const phoneReset = await resetUserPhoneCodeAttemptPhone(phone)
    expect(phoneReset).not.toBeInstanceOf(Error)
    if (phoneReset instanceof Error) return phoneReset

    const ipReset = await resetUserPhoneCodeAttemptIp(undefined as unknown as IpAddress)
    expect(ipReset).not.toBeInstanceOf(Error)
    if (ipReset instanceof Error) return ipReset

    const response = await apolloClient.mutate({ mutation, variables })
    expect(response.data.userRequestAuthCode.success).toBeTruthy()
  }

  // The limiter is now exhausted; one more request must fail with its error
  const { errors } = await apolloClient.mutate({ mutation, variables })
  expect(new error()).toBeInstanceOf(
    UserPhoneCodeAttemptPhoneMinIntervalRateLimiterExceededError,
  )
  expect(errors && errors[0].message).toMatch(new RegExp(`.*${error.name}.*`))
}
// Exhausts the per-phone limiter for auth-code requests, then verifies the next
// request is rejected with the expected error message.
const testPhoneCodeAttemptPerPhone = async (mutation) => {
  // Limiter configuration under test
  const {
    limits: { points },
    error,
  } = RateLimitConfig.requestPhoneCodeAttemptPerPhone

  // Start from a clean slate for the limiter under test
  const initialReset = await resetUserPhoneCodeAttemptPhone(phone)
  expect(initialReset).not.toBeInstanceOf(Error)
  if (initialReset instanceof Error) return initialReset

  const variables = { input: { phone } }

  // Keep the sibling limiters clear on each attempt so only the per-phone
  // limiter can trip.
  for (let attempt = 0; attempt < points; attempt++) {
    const minIntervalReset = await resetUserPhoneCodeAttemptPhoneMinIntervalLimits(phone)
    expect(minIntervalReset).not.toBeInstanceOf(Error)
    if (minIntervalReset instanceof Error) return minIntervalReset

    const ipReset = await resetUserPhoneCodeAttemptIp(undefined as unknown as IpAddress)
    expect(ipReset).not.toBeInstanceOf(Error)
    if (ipReset instanceof Error) return ipReset

    const response = await apolloClient.mutate({ mutation, variables })
    expect(response.data.userRequestAuthCode.success).toBeTruthy()
  }

  // Limiter exhausted: the next request must surface the per-phone error
  const expectedErrorMessage =
    "Too many phone code attempts, please wait for a while and try again."
  const { errors } = await apolloClient.mutate({ mutation, variables })
  expect(new error()).toBeInstanceOf(UserPhoneCodeAttemptPhoneRateLimiterExceededError)
  expect(errors && errors[0].message).toBe(expectedErrorMessage)
}
// Exhausts the per-IP limiter for auth-code requests, then verifies the next
// request is rejected with the expected error message.
const testPhoneCodeAttemptPerIp = async (mutation) => {
  // Limiter configuration under test
  const {
    limits: { points },
    error,
  } = RateLimitConfig.requestPhoneCodeAttemptPerIp

  // Start from a clean slate for the limiter under test (tests have no real
  // client IP, so the limiter key is undefined)
  const initialReset = await resetUserPhoneCodeAttemptIp(undefined as unknown as IpAddress)
  expect(initialReset).not.toBeInstanceOf(Error)
  if (initialReset instanceof Error) return initialReset

  const variables = { input: { phone } }

  // Keep the sibling limiters clear on each attempt so only the per-IP limiter
  // can trip.
  for (let attempt = 0; attempt < points; attempt++) {
    const minIntervalReset = await resetUserPhoneCodeAttemptPhoneMinIntervalLimits(phone)
    expect(minIntervalReset).not.toBeInstanceOf(Error)
    if (minIntervalReset instanceof Error) return minIntervalReset

    const phoneReset = await resetUserPhoneCodeAttemptPhone(phone)
    expect(phoneReset).not.toBeInstanceOf(Error)
    if (phoneReset instanceof Error) return phoneReset

    const response = await apolloClient.mutate({ mutation, variables })
    expect(response.data.userRequestAuthCode.success).toBeTruthy()
  }

  // Limiter exhausted: the next request must surface the per-IP error
  const expectedErrorMessage =
    "Too many phone code attempts on same network, please wait for a while and try again."
  const { errors } = await apolloClient.mutate({ mutation, variables })
  expect(new error()).toBeInstanceOf(UserPhoneCodeAttemptIpRateLimiterExceededError)
  expect(errors && errors[0].message).toBe(expectedErrorMessage)
}
// Exhausts the failed-login-per-phone limiter with `points` failing attempts,
// then verifies the next attempt is rejected by the limiter.
const testRateLimitLoginByPhone = async ({
  input,
  expectedMessage,
  mutation,
}: {
  input: { phone: PhoneNumber; code: PhoneCode }
  expectedMessage: string
  mutation: DocumentNode
}) => {
  const { phone } = input

  // Limiter configuration under test
  const {
    limits: { points },
    error,
  } = RateLimitConfig.failedLoginAttemptPerPhone

  // Start from a clean slate
  const limiterReset = await resetUserLoginPhoneRateLimits(phone)
  expect(limiterReset).not.toBeInstanceOf(Error)
  if (limiterReset instanceof Error) return limiterReset

  const variables = { input }

  // Burn through every allowed failed attempt
  for (let attempt = 0; attempt < points; attempt++) {
    const { data } = await apolloClient.mutate({ mutation, variables })
    expect(data.userLogin.errors).toEqual(
      expect.arrayContaining([expect.objectContaining({ message: expectedMessage })]),
    )
  }

  // Limiter exhausted: the next attempt must report the rate-limit error
  const expectedErrorMessage =
    "Too many login attempts, please wait for a while and try again."
  const { data } = await apolloClient.mutate({ mutation, variables })
  expect(new error()).toBeInstanceOf(UserLoginPhoneRateLimiterExceededError)
  expect(data.userLogin.errors).toEqual(
    expect.arrayContaining([expect.objectContaining({ message: expectedErrorMessage })]),
  )
}
const testRateLimitLoginByIp = async ({
input,
expectedMessage,
mutation,
}: {
input: { phone: PhoneNumber; code: PhoneCode }
expectedMessage: string
mutation: DocumentNode
}) => {
const ip = undefined as unknown as IpAddress
const { phone } = input
// Fetch limiter config
const {
limits: { points },
error,
} = RateLimitConfig.failedLoginAttemptPerIp
// Reset limiter
const resetIp = await resetUserLoginIpRateLimits(ip)
expect(resetIp).not.toBeInstanceOf(Error)
if (resetIp instanceof Error) return resetIp
// Exhaust limiter
for (let i = 0; i < points; i++) {
const resetPhone = await resetUserLoginPhoneRateLimits(phone)
expect(resetPhone).not.toBeInstanceOf(Error)
if (resetPhone instanceof Error) return resetPhone
const result = await apolloClient.mutate({ mutation, variables: { input } })
expect(result.data.userLogin.errors).toEqual(
expect.arrayContaining([expect.objectContaining({ message: expectedMessage })]),
)
}
// Check limiter is exhausted
const resetPhone = await resetUserLoginPhoneRateLimits(phone)
expect(resetPhone).not.toBeInstanceOf(Error)
if (resetPhone instanceof Error) return resetPhone
const expectedErrorMessage =
"Too many login attempts on same network, please wait for a while and try again."
const result = await apolloClient.mutate({ mutation, variables: { input } })
expect(new error()).toBeInstanceOf(UserLoginIpRateLimiterExceededError)
expect(result.data.userLogin.errors).toEqual(
expect.arrayContaining([expect.objectContaining({ message: expectedErrorMessage })]),
)
} | the_stack |
import classNames from 'classnames'
import copy from 'copy-to-clipboard'
import ContentCopyIcon from 'mdi-react/ContentCopyIcon'
import DotsHorizontalIcon from 'mdi-react/DotsHorizontalIcon'
import FileDocumentIcon from 'mdi-react/FileDocumentIcon'
import React, { useState, useCallback } from 'react'
import { Tooltip } from '@sourcegraph/branded/src/components/tooltip/Tooltip'
import { Link } from '@sourcegraph/shared/src/components/Link'
import { pluralize } from '@sourcegraph/shared/src/util/strings'
import { Timestamp } from '../../components/time/Timestamp'
import { GitCommitFields } from '../../graphql-operations'
import { eventLogger } from '../../tracking/eventLogger'
import { DiffModeSelector } from '../commit/DiffModeSelector'
import { DiffMode } from '../commit/RepositoryCommitPage'
import { GitCommitNodeByline } from './GitCommitNodeByline'
export interface GitCommitNodeProps {
/** The Git commit to display. */
node: GitCommitFields
/** An optional additional CSS class name to apply to this element. */
className?: string
/** Display in a single line (more compactly). */
compact?: boolean
/** Expand the commit message body. */
expandCommitMessageBody?: boolean
/** Hide the button to expand the commit message body. */
hideExpandCommitMessageBody?: boolean
/** Show the full 40-character SHA and parents on their own row. */
showSHAAndParentsRow?: boolean
/** Fragment to show at the end to the right of the SHA. */
afterElement?: React.ReactFragment
/** Determine the git diff visualization UI */
diffMode?: DiffMode
/** Handler for changing the diff mode */
onHandleDiffMode?: (mode: DiffMode) => void
}
/** Displays a Git commit. */
export const GitCommitNode: React.FunctionComponent<GitCommitNodeProps> = ({
node,
afterElement,
className,
compact,
expandCommitMessageBody,
hideExpandCommitMessageBody,
showSHAAndParentsRow,
diffMode,
onHandleDiffMode,
}) => {
const [showCommitMessageBody, setShowCommitMessageBody] = useState<boolean>(false)
const [flashCopiedToClipboardMessage, setFlashCopiedToClipboardMessage] = useState<boolean>(false)
const toggleShowCommitMessageBody = useCallback((): void => {
eventLogger.log('CommitBodyToggled')
setShowCommitMessageBody(!showCommitMessageBody)
}, [showCommitMessageBody])
const copyToClipboard = useCallback((oid): void => {
eventLogger.log('CommitSHACopiedToClipboard')
copy(oid)
setFlashCopiedToClipboardMessage(true)
Tooltip.forceUpdate()
setTimeout(() => {
setFlashCopiedToClipboardMessage(false)
Tooltip.forceUpdate()
}, 1500)
}, [])
const messageElement = (
<div
className={classNames('git-commit-node__message flex-grow-1', compact && 'git-commit-node__message-small')}
>
<Link to={node.canonicalURL} className="git-commit-node__message-subject" title={node.message}>
{node.subject}
</Link>
{node.body && !hideExpandCommitMessageBody && !expandCommitMessageBody && (
<button
type="button"
className="btn btn-secondary btn-sm git-commit-node__message-toggle"
onClick={toggleShowCommitMessageBody}
>
<DotsHorizontalIcon className="icon-inline" />
</button>
)}
{compact && (
<small className="text-muted git-commit-node__message-timestamp">
<Timestamp noAbout={true} date={node.committer ? node.committer.date : node.author.date} />
</small>
)}
</div>
)
const commitMessageBody =
expandCommitMessageBody || showCommitMessageBody ? (
<div className="w-100">
<pre className="git-commit-node__message-body">{node.body}</pre>
</div>
) : undefined
const bylineElement = (
<GitCommitNodeByline
className="d-flex text-muted git-commit-node__byline"
author={node.author}
committer={node.committer}
// TODO compact needs to be always a boolean
compact={Boolean(compact)}
messageElement={messageElement}
commitMessageBody={commitMessageBody}
/>
)
const shaDataElement = showSHAAndParentsRow && (
<div className="w-100 git-commit-node__sha-and-parents">
<div className="d-flex mb-1">
<span className="git-commit-node__sha-and-parents-label">Commit:</span>
<code className="git-commit-node__sha-and-parents-sha">
{node.oid}{' '}
<button
type="button"
className="btn btn-icon git-commit-node__sha-and-parents-copy"
onClick={() => copyToClipboard(node.oid)}
data-tooltip={flashCopiedToClipboardMessage ? 'Copied!' : 'Copy full SHA'}
>
<ContentCopyIcon className="icon-inline" />
</button>
</code>
</div>
<div className="align-items-center d-flex">
{node.parents.length > 0 ? (
<>
<span className="git-commit-node__sha-and-parents-label">
{node.parents.length === 1
? 'Parent'
: `${node.parents.length} ${pluralize('parent', node.parents.length)}`}
:
</span>{' '}
{node.parents.map((parent, index) => (
<div className="d-flex" key={index}>
<Link className="git-commit-node__sha-and-parents-parent" to={parent.url}>
<code>{parent.oid}</code>
</Link>
<button
type="button"
className="btn btn-icon git-commit-node__sha-and-parents-copy"
onClick={() => copyToClipboard(parent.oid)}
data-tooltip={flashCopiedToClipboardMessage ? 'Copied!' : 'Copy full SHA'}
>
<ContentCopyIcon className="icon-inline" />
</button>
</div>
))}
</>
) : (
'(root commit)'
)}
</div>
</div>
)
const diffModeSelector = (): JSX.Element | null => {
if (diffMode && onHandleDiffMode) {
return <DiffModeSelector onHandleDiffMode={onHandleDiffMode} diffMode={diffMode} small={true} />
}
return null
}
const viewFilesCommitElement = node.tree && (
<div className="d-flex justify-content-between">
<Link
className="btn btn-sm btn-outline-secondary align-center d-inline-flex"
to={node.tree.canonicalURL}
data-tooltip="View files at this commit"
>
<FileDocumentIcon className="icon-inline mr-1" />
'View files in commit'
</Link>
{diffModeSelector()}
</div>
)
const oidElement = <code className="git-commit-node__oid">{node.abbreviatedOID}</code>
return (
<div key={node.id} className={classNames('git-commit-node', compact && 'git-commit-node--compact', className)}>
<>
{!compact ? (
<>
<div className="w-100 d-flex justify-content-between align-items-start">
<div className="git-commit-node__signature">{bylineElement}</div>
<div className="git-commit-node__actions">
{!showSHAAndParentsRow && (
<div>
<div className="btn-group btn-group-sm mr-2" role="group">
<Link
className="btn btn-secondary"
to={node.canonicalURL}
data-tooltip="View this commit"
>
<strong>{oidElement}</strong>
</Link>
<button
type="button"
className="btn btn-secondary"
onClick={() => copyToClipboard(node.oid)}
data-tooltip={
flashCopiedToClipboardMessage ? 'Copied!' : 'Copy full SHA'
}
>
<ContentCopyIcon className="icon-inline small" />
</button>
</div>
{node.tree && (
<Link
className="btn btn-sm btn-secondary"
to={node.tree.canonicalURL}
data-tooltip="View files at this commit"
>
<FileDocumentIcon className="icon-inline mr-1" />
</Link>
)}
</div>
)}
{shaDataElement}
</div>
</div>
<div>{showSHAAndParentsRow ? viewFilesCommitElement : shaDataElement}</div>
</>
) : (
<div>
<div className="w-100 d-flex justify-content-between align-items-center flex-wrap-reverse">
{bylineElement}
{messageElement}
<Link to={node.canonicalURL}>{oidElement}</Link>
{afterElement}
</div>
{commitMessageBody}
</div>
)}
</>
</div>
)
} | the_stack |
import { assert, AssertionError } from "chai";
import * as sinon from "sinon";
import "chai/register-should";
import { createReadStream } from "fs";
import { DefaultHttpClient } from "../lib/defaultHttpClient";
import { RestError } from "../lib/restError";
import { isNode } from "../lib/util/utils";
import { WebResource, HttpRequestBody, TransferProgressEvent } from "../lib/webResource";
import { getHttpMock, HttpMockFacade } from "./mockHttp";
import { TestFunction } from "mocha";
import { CommonResponse } from "../lib/fetchHttpClient";
const nodeIt = (isNode ? it : it.skip) as TestFunction;
/**
 * Returns an AbortController, preferring the runtime's native implementation
 * and falling back to the CommonJS ponyfill when none is available.
 */
function getAbortController(): AbortController {
    if (typeof AbortController === "function") {
        return new AbortController();
    }
    const { AbortController: AbortControllerPonyfill } = require("abortcontroller-polyfill/dist/cjs-ponyfill");
    return new AbortControllerPonyfill();
}
// Test suite for DefaultHttpClient: response handling, cancellation, cookies,
// progress reporting, timeouts, and error translation. Uses an HTTP mock facade
// except where noted (a few tests deliberately exercise the unstubbed client).
describe("defaultHttpClient", function () {
    // Resolves after `ms` milliseconds; used to simulate slow server handlers.
    function sleep(ms: number): Promise<void> {
        return new Promise((resolve) => setTimeout(resolve, ms));
    }
    let httpMock: HttpMockFacade;
    beforeEach(() => {
        httpMock = getHttpMock();
        httpMock.setup();
    });
    // NOTE(review): teardown runs in both afterEach and after — the second call
    // is presumably a harmless safety net; confirm it is intentional.
    afterEach(() => httpMock.teardown());
    after(() => httpMock.teardown());
    // Builds a DefaultHttpClient whose `fetch` is stubbed to route through the mock.
    function getMockedHttpClient(): DefaultHttpClient {
        const httpClient = new DefaultHttpClient();
        const fetchMock = httpMock.getFetch();
        if (fetchMock) {
            sinon.stub(httpClient, "fetch").callsFake(async (input, init) => {
                const response = await fetchMock(input, init);
                return (response as unknown) as CommonResponse;
            });
        }
        return httpClient;
    }
    // Non-2xx statuses must resolve as responses, not reject as errors.
    it("should return a response instead of throwing for awaited 404", async function () {
        const resourceUrl = "/nonexistent";
        httpMock.get(resourceUrl, async () => {
            return { status: 404 };
        });
        const request = new WebResource(resourceUrl, "GET");
        const httpClient = getMockedHttpClient();
        const response = await httpClient.sendRequest(request);
        response.status.should.equal(404);
    });
    it("should allow canceling requests", async function () {
        const resourceUrl = `/fileupload`;
        // Handler sleeps long enough that reaching assert.fail() means the
        // abort did not take effect.
        httpMock.post(resourceUrl, async () => {
            await sleep(10000);
            assert.fail();
            return { status: 201 };
        });
        const controller = getAbortController();
        const veryBigPayload = "very long string";
        const request = new WebResource(
            resourceUrl,
            "POST",
            veryBigPayload,
            undefined,
            undefined,
            true,
            undefined,
            controller.signal
        );
        const client = getMockedHttpClient();
        const promise = client.sendRequest(request);
        controller.abort();
        try {
            await promise;
            assert.fail("");
        } catch (err) {
            // Any error except the AssertionError from the handler means the
            // request was aborted as expected.
            err.should.not.be.instanceof(AssertionError);
        }
    });
    nodeIt("should not overwrite a user-provided cookie (nodejs only)", async function () {
        // Cookie is only allowed to be set by the browser based on an actual response Set-Cookie header
        httpMock.get("http://my.fake.domain/set-cookie", {
            status: 200,
            headers: {
                "Set-Cookie": "data=123456",
            },
        });
        // Echoes the request headers back so the test can inspect the Cookie sent.
        httpMock.get("http://my.fake.domain/cookie", async (_url, _method, _body, headers) => {
            return {
                status: 200,
                headers: headers,
            };
        });
        const client = getMockedHttpClient();
        const request1 = new WebResource("http://my.fake.domain/set-cookie");
        const response1 = await client.sendRequest(request1);
        response1.headers.get("Set-Cookie")!.should.equal("data=123456");
        // The stored cookie is sent automatically on the next request...
        const request2 = new WebResource("http://my.fake.domain/cookie");
        const response2 = await client.sendRequest(request2);
        response2.headers.get("Cookie")!.should.equal("data=123456");
        // ...but an explicit user-provided Cookie header must win.
        const request3 = new WebResource("http://my.fake.domain/cookie", "GET", undefined, undefined, {
            Cookie: "data=abcdefg",
        });
        const response3 = await client.sendRequest(request3);
        response3.headers.get("Cookie")!.should.equal("data=abcdefg");
    });
    it("should allow canceling multiple requests with one token", async function () {
        httpMock.post("/fileupload", async () => {
            await sleep(1000);
            assert.fail();
            return { status: 201 };
        });
        const controller = getAbortController();
        const buf = "Very large string";
        // Two requests sharing a single abort signal.
        const requests = [
            new WebResource(
                "/fileupload",
                "POST",
                buf,
                undefined,
                undefined,
                true,
                undefined,
                controller.signal
            ),
            new WebResource(
                "/fileupload",
                "POST",
                buf,
                undefined,
                undefined,
                true,
                undefined,
                controller.signal
            ),
        ];
        const client = getMockedHttpClient();
        const promises = requests.map((r) => client.sendRequest(r));
        controller.abort();
        // Ensure each promise is individually rejected
        for (const promise of promises) {
            try {
                await promise;
                assert.fail();
            } catch (err) {
                err.should.not.be.instanceof(AssertionError);
            }
        }
    });
    describe("should report upload and download progress", () => {
        type Notified = { notified: boolean };
        // Shared progress listener: records that it fired and sanity-checks the event shape.
        const listener = (operationStatus: Notified, ev: TransferProgressEvent) => {
            operationStatus.notified = true;
            if (typeof ProgressEvent !== "undefined") {
                ev.should.not.be.instanceof(ProgressEvent);
            }
            ev.loadedBytes.should.be.a("Number");
        };
        it("for simple bodies", async function () {
            // `body` is declared below; the closure only runs once the request is sent.
            httpMock.post("/fileupload", async (_url, _method, _body) => {
                return {
                    status: 251,
                    body: body.repeat(9).substring(0, 200),
                    headers: { "Content-Length": "200" },
                };
            });
            const upload: Notified = { notified: false };
            const download: Notified = { notified: false };
            const body = "Very large string to upload";
            const request = new WebResource(
                "/fileupload",
                "POST",
                body,
                undefined,
                undefined,
                false,
                undefined,
                undefined,
                0,
                (ev) => listener(upload, ev),
                (ev) => listener(download, ev)
            );
            const client = getMockedHttpClient();
            const response = await client.sendRequest(request);
            response.should.exist;
            response.status.should.equal(251);
            upload.notified.should.be.true;
            download.notified.should.be.true;
        });
        it("for blob or stream bodies", async function () {
            let payload: HttpRequestBody;
            // Node gets a file stream of this test file; browsers get a 1 MiB Blob.
            if (isNode) {
                payload = () => createReadStream(__filename);
            } else {
                payload = new Blob([new Uint8Array(1024 * 1024)]);
            }
            // NOTE(review): on Node this is the length of the factory function's
            // source text, not the stream size — presumably good enough for a mock
            // Content-length; confirm.
            const size = isNode ? payload.toString().length : undefined;
            httpMock.post("/bigfileupload", async (_url, _method, _body) => {
                return {
                    status: 250,
                    body: payload,
                    headers: { "Content-Type": "text/javascript", "Content-length": size },
                };
            });
            const upload: Notified = { notified: false };
            const download: Notified = { notified: false };
            const request = new WebResource(
                "/bigfileupload",
                "POST",
                payload,
                undefined,
                undefined,
                true,
                undefined,
                undefined,
                0,
                (ev) => listener(upload, ev),
                (ev) => listener(download, ev)
            );
            const client = getMockedHttpClient();
            const response = await client.sendRequest(request);
            response.status.should.equal(250);
            // Drain whichever body representation came back so download progress fires.
            if (response.blobBody) {
                await response.blobBody;
            } else if (typeof response.readableStreamBody === "function") {
                const streamBody = (response.readableStreamBody as Function)();
                streamBody.on("data", () => {});
                await new Promise((resolve, reject) => {
                    streamBody.on("end", resolve);
                    streamBody.on("error", reject);
                });
            }
            upload.notified.should.be.true;
            download.notified.should.be.true;
        });
    });
    it("should honor request timeouts", async function () {
        httpMock.timeout("GET", "/slow");
        // 100 ms timeout against a mock that never responds.
        const request = new WebResource(
            "/slow",
            "GET",
            undefined,
            undefined,
            undefined,
            false,
            false,
            undefined,
            100
        );
        const client = getMockedHttpClient();
        try {
            await client.sendRequest(request);
            throw new Error("request did not fail as expected");
        } catch (err) {
            err.message.should.not.match(/request did not fail as expected/);
        }
    });
    it("should give a graceful error for nonexistent hosts", async function () {
        const requestUrl = "http://fake.domain";
        const request = new WebResource(requestUrl, "GET");
        httpMock.passThrough();
        // testing the unstubbed behavior so not using local mock
        const client = new DefaultHttpClient();
        try {
            await client.sendRequest(request);
            throw new Error("request did not fail as expected");
        } catch (err) {
            err.should.be.instanceof(RestError);
            err.code.should.equal("REQUEST_SEND_ERROR");
        }
    });
    it("should interpret undefined as an empty body", async function () {
        const requestUrl = "/expect-empty";
        // Mock responds 400 (with diagnostic text) if any body arrives.
        httpMock.put(requestUrl, async (_url, _method, body, _headers) => {
            if (!body) {
                return {
                    status: 200,
                };
            } else {
                return {
                    status: 400,
                    body: `Expected empty body but got "${JSON.stringify(body)}"`,
                };
            }
        });
        const request = new WebResource(requestUrl, "PUT");
        const client = getMockedHttpClient();
        const response = await client.sendRequest(request);
        response.status.should.equal(200, response.bodyAsText!);
    });
    // Live request against example.com; compares the body ignoring all whitespace.
    it("should send HTTP requests", async function () {
        const request = new WebResource("https://example.com", "GET");
        request.headers.set("Access-Control-Allow-Headers", "Content-Type");
        request.headers.set("Access-Control-Allow-Methods", "GET");
        request.headers.set("Access-Control-Allow-Origin", "https://example.com");
        httpMock.passThrough();
        // testing the unstubbed behavior so not using local mock
        const httpClient = new DefaultHttpClient();
        const response = await httpClient.sendRequest(request);
        assert.deepEqual(response.request, request);
        assert.strictEqual(response.status, 200);
        assert(response.headers);
        assert.strictEqual(response.headers.get("content-type")!.split(";")[0], "text/html");
        const responseBody: string | null | undefined = response.bodyAsText;
        const expectedResponseBody = `<!doctype html>
<html>
<head>
<title>Example Domain</title>
<meta charset="utf-8" />
<meta http-equiv="Content-type" content="text/html; charset=utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<style type="text/css">
body {
background-color: #f0f0f2;
margin: 0;
padding: 0;
font-family: -apple-system, system-ui, BlinkMacSystemFont, "SegoeUI", "OpenSans", "HelveticaNeue", Helvetica, Arial, sans-serif;
}
div {
width: 600px;
margin: 5em auto;
padding: 2em;
background-color: #fdfdff;
border-radius: 0.5em;
box-shadow: 2px 3px 7px 2px rgba(0,0,0,0.02);
}
a:link, a:visited {
color: #38488f;
text-decoration: none;
}
@media (max-width: 700px) {
div {
margin: 0 auto;
width: auto;
}
}
</style>
</head>
<body>
<div>
<h1>Example Domain</h1>
<p>This domain is for use in illustrative examples in documents. You may use this
domain in literature without prior coordination or asking for permission.</p>
<p><a href="https://www.iana.org/domains/example">More information...</a></p>
</div>
</body>
</html>
`;
        assert.strictEqual(
            responseBody && responseBody.replace(/\s/g, ""),
            expectedResponseBody.replace(/\s/g, "")
        );
        httpMock.teardown();
    });
});
import * as msRest from "@azure/ms-rest-js";
import * as msRestAzure from "@azure/ms-rest-azure-js";
import * as Models from "../models";
import * as Mappers from "../models/machineExtensionsMappers";
import * as Parameters from "../models/parameters";
import { HybridComputeManagementClientContext } from "../hybridComputeManagementClientContext";
/** Class representing a MachineExtensions. */
// AutoRest-generated operations client: each public method delegates to the
// shared service-client context. Long-running operations (create/update/delete)
// expose both a polling wrapper and a begin* variant returning an LROPoller.
export class MachineExtensions {
  private readonly client: HybridComputeManagementClientContext;
  /**
   * Create a MachineExtensions.
   * @param {HybridComputeManagementClientContext} client Reference to the service client.
   */
  constructor(client: HybridComputeManagementClientContext) {
    this.client = client;
  }
  /**
   * The operation to create or update the extension.
   * @param resourceGroupName The name of the resource group.
   * @param name The name of the machine where the extension should be created or updated.
   * @param extensionName The name of the machine extension.
   * @param extensionParameters Parameters supplied to the Create Machine Extension operation.
   * @param [options] The optional parameters
   * @returns Promise<Models.MachineExtensionsCreateOrUpdateResponse>
   */
  // Polls the long-running operation to completion before resolving.
  createOrUpdate(resourceGroupName: string, name: string, extensionName: string, extensionParameters: Models.MachineExtension, options?: msRest.RequestOptionsBase): Promise<Models.MachineExtensionsCreateOrUpdateResponse> {
    return this.beginCreateOrUpdate(resourceGroupName,name,extensionName,extensionParameters,options)
      .then(lroPoller => lroPoller.pollUntilFinished()) as Promise<Models.MachineExtensionsCreateOrUpdateResponse>;
  }
  /**
   * The operation to create or update the extension.
   * @param resourceGroupName The name of the resource group.
   * @param name The name of the machine where the extension should be created or updated.
   * @param extensionName The name of the machine extension.
   * @param extensionParameters Parameters supplied to the Create Machine Extension operation.
   * @param [options] The optional parameters
   * @returns Promise<Models.MachineExtensionsUpdateResponse>
   */
  update(resourceGroupName: string, name: string, extensionName: string, extensionParameters: Models.MachineExtensionUpdate, options?: msRest.RequestOptionsBase): Promise<Models.MachineExtensionsUpdateResponse> {
    return this.beginUpdate(resourceGroupName,name,extensionName,extensionParameters,options)
      .then(lroPoller => lroPoller.pollUntilFinished()) as Promise<Models.MachineExtensionsUpdateResponse>;
  }
  /**
   * The operation to delete the extension.
   * @param resourceGroupName The name of the resource group.
   * @param name The name of the machine where the extension should be deleted.
   * @param extensionName The name of the machine extension.
   * @param [options] The optional parameters
   * @returns Promise<msRest.RestResponse>
   */
  // Named deleteMethod (not delete) because `delete` is a reserved word.
  deleteMethod(resourceGroupName: string, name: string, extensionName: string, options?: msRest.RequestOptionsBase): Promise<msRest.RestResponse> {
    return this.beginDeleteMethod(resourceGroupName,name,extensionName,options)
      .then(lroPoller => lroPoller.pollUntilFinished());
  }
  /**
   * The operation to get the extension.
   * @param resourceGroupName The name of the resource group.
   * @param name The name of the machine containing the extension.
   * @param extensionName The name of the machine extension.
   * @param [options] The optional parameters
   * @returns Promise<Models.MachineExtensionsGetResponse>
   */
  get(resourceGroupName: string, name: string, extensionName: string, options?: msRest.RequestOptionsBase): Promise<Models.MachineExtensionsGetResponse>;
  /**
   * @param resourceGroupName The name of the resource group.
   * @param name The name of the machine containing the extension.
   * @param extensionName The name of the machine extension.
   * @param callback The callback
   */
  get(resourceGroupName: string, name: string, extensionName: string, callback: msRest.ServiceCallback<Models.MachineExtension>): void;
  /**
   * @param resourceGroupName The name of the resource group.
   * @param name The name of the machine containing the extension.
   * @param extensionName The name of the machine extension.
   * @param options The optional parameters
   * @param callback The callback
   */
  get(resourceGroupName: string, name: string, extensionName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.MachineExtension>): void;
  // Implementation signature unifying the promise and callback overloads above.
  get(resourceGroupName: string, name: string, extensionName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.MachineExtension>, callback?: msRest.ServiceCallback<Models.MachineExtension>): Promise<Models.MachineExtensionsGetResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        name,
        extensionName,
        options
      },
      getOperationSpec,
      callback) as Promise<Models.MachineExtensionsGetResponse>;
  }
  /**
   * The operation to get all extensions of a non-Azure machine
   * @param resourceGroupName The name of the resource group.
   * @param name The name of the machine containing the extension.
   * @param [options] The optional parameters
   * @returns Promise<Models.MachineExtensionsListResponse>
   */
  list(resourceGroupName: string, name: string, options?: Models.MachineExtensionsListOptionalParams): Promise<Models.MachineExtensionsListResponse>;
  /**
   * @param resourceGroupName The name of the resource group.
   * @param name The name of the machine containing the extension.
   * @param callback The callback
   */
  list(resourceGroupName: string, name: string, callback: msRest.ServiceCallback<Models.MachineExtensionsListResult>): void;
  /**
   * @param resourceGroupName The name of the resource group.
   * @param name The name of the machine containing the extension.
   * @param options The optional parameters
   * @param callback The callback
   */
  list(resourceGroupName: string, name: string, options: Models.MachineExtensionsListOptionalParams, callback: msRest.ServiceCallback<Models.MachineExtensionsListResult>): void;
  // Implementation signature unifying the promise and callback overloads above.
  list(resourceGroupName: string, name: string, options?: Models.MachineExtensionsListOptionalParams | msRest.ServiceCallback<Models.MachineExtensionsListResult>, callback?: msRest.ServiceCallback<Models.MachineExtensionsListResult>): Promise<Models.MachineExtensionsListResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        name,
        options
      },
      listOperationSpec,
      callback) as Promise<Models.MachineExtensionsListResponse>;
  }
  /**
   * The operation to create or update the extension.
   * @param resourceGroupName The name of the resource group.
   * @param name The name of the machine where the extension should be created or updated.
   * @param extensionName The name of the machine extension.
   * @param extensionParameters Parameters supplied to the Create Machine Extension operation.
   * @param [options] The optional parameters
   * @returns Promise<msRestAzure.LROPoller>
   */
  beginCreateOrUpdate(resourceGroupName: string, name: string, extensionName: string, extensionParameters: Models.MachineExtension, options?: msRest.RequestOptionsBase): Promise<msRestAzure.LROPoller> {
    return this.client.sendLRORequest(
      {
        resourceGroupName,
        name,
        extensionName,
        extensionParameters,
        options
      },
      beginCreateOrUpdateOperationSpec,
      options);
  }
  /**
   * The operation to create or update the extension.
   * @param resourceGroupName The name of the resource group.
   * @param name The name of the machine where the extension should be created or updated.
   * @param extensionName The name of the machine extension.
   * @param extensionParameters Parameters supplied to the Create Machine Extension operation.
   * @param [options] The optional parameters
   * @returns Promise<msRestAzure.LROPoller>
   */
  beginUpdate(resourceGroupName: string, name: string, extensionName: string, extensionParameters: Models.MachineExtensionUpdate, options?: msRest.RequestOptionsBase): Promise<msRestAzure.LROPoller> {
    return this.client.sendLRORequest(
      {
        resourceGroupName,
        name,
        extensionName,
        extensionParameters,
        options
      },
      beginUpdateOperationSpec,
      options);
  }
  /**
   * The operation to delete the extension.
   * @param resourceGroupName The name of the resource group.
   * @param name The name of the machine where the extension should be deleted.
   * @param extensionName The name of the machine extension.
   * @param [options] The optional parameters
   * @returns Promise<msRestAzure.LROPoller>
   */
  beginDeleteMethod(resourceGroupName: string, name: string, extensionName: string, options?: msRest.RequestOptionsBase): Promise<msRestAzure.LROPoller> {
    return this.client.sendLRORequest(
      {
        resourceGroupName,
        name,
        extensionName,
        options
      },
      beginDeleteMethodOperationSpec,
      options);
  }
  /**
   * The operation to get all extensions of a non-Azure machine
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param [options] The optional parameters
   * @returns Promise<Models.MachineExtensionsListNextResponse>
   */
  listNext(nextPageLink: string, options?: msRest.RequestOptionsBase): Promise<Models.MachineExtensionsListNextResponse>;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param callback The callback
   */
  listNext(nextPageLink: string, callback: msRest.ServiceCallback<Models.MachineExtensionsListResult>): void;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param options The optional parameters
   * @param callback The callback
   */
  listNext(nextPageLink: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.MachineExtensionsListResult>): void;
  // Implementation signature unifying the promise and callback overloads above.
  listNext(nextPageLink: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.MachineExtensionsListResult>, callback?: msRest.ServiceCallback<Models.MachineExtensionsListResult>): Promise<Models.MachineExtensionsListNextResponse> {
    return this.client.sendOperationRequest(
      {
        nextPageLink,
        options
      },
      listNextOperationSpec,
      callback) as Promise<Models.MachineExtensionsListNextResponse>;
  }
}
// Operation Specifications
// Declarative request/response descriptors consumed by the ms-rest-js runtime;
// generated by AutoRest alongside the MachineExtensions class above.
const serializer = new msRest.Serializer(Mappers);
// GET a single extension by resource group / machine / extension name.
const getOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.HybridCompute/machines/{name}/extensions/{extensionName}",
  urlParameters: [
    Parameters.resourceGroupName,
    Parameters.name,
    Parameters.extensionName,
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.MachineExtension
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// List all extensions on a machine (supports the optional $expand query).
const listOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.HybridCompute/machines/{name}/extensions",
  urlParameters: [
    Parameters.resourceGroupName,
    Parameters.name,
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.expand,
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.MachineExtensionsListResult
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// PUT create/update (long-running: 202 Accepted is a valid interim response).
const beginCreateOrUpdateOperationSpec: msRest.OperationSpec = {
  httpMethod: "PUT",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.HybridCompute/machines/{name}/extensions/{extensionName}",
  urlParameters: [
    Parameters.resourceGroupName,
    Parameters.name,
    Parameters.extensionName,
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  requestBody: {
    parameterPath: "extensionParameters",
    mapper: {
      ...Mappers.MachineExtension,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.MachineExtension
    },
    202: {},
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// PATCH update (long-running).
const beginUpdateOperationSpec: msRest.OperationSpec = {
  httpMethod: "PATCH",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.HybridCompute/machines/{name}/extensions/{extensionName}",
  urlParameters: [
    Parameters.resourceGroupName,
    Parameters.name,
    Parameters.extensionName,
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  requestBody: {
    parameterPath: "extensionParameters",
    mapper: {
      ...Mappers.MachineExtensionUpdate,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.MachineExtension
    },
    202: {},
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// DELETE extension (long-running; 204 means it was already gone).
const beginDeleteMethodOperationSpec: msRest.OperationSpec = {
  httpMethod: "DELETE",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.HybridCompute/machines/{name}/extensions/{extensionName}",
  urlParameters: [
    Parameters.resourceGroupName,
    Parameters.name,
    Parameters.extensionName,
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {},
    202: {},
    204: {},
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// Follows the service-provided nextLink for paging; the absolute baseUrl is
// required because nextLink is substituted as the entire path.
const listNextOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  baseUrl: "https://management.azure.com",
  path: "{nextLink}",
  urlParameters: [
    Parameters.nextPageLink
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.MachineExtensionsListResult
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
import * as pulumi from "@pulumi/pulumi";
import { input as inputs, output as outputs } from "../types";
import * as utilities from "../utilities";
/**
* Hierarchical firewall policy rules let you create and enforce a consistent firewall policy across your organization. Rules can explicitly allow or deny connections or delegate evaluation to lower level policies.
*
* For more information see the [official documentation](https://cloud.google.com/vpc/docs/using-firewall-policies#create-rules)
*
* ## Example Usage
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as gcp from "@pulumi/gcp";
*
* const defaultFirewallPolicy = new gcp.compute.FirewallPolicy("defaultFirewallPolicy", {
* parent: "organizations/12345",
* shortName: "my-policy",
* description: "Example Resource",
* });
* const defaultFirewallPolicyRule = new gcp.compute.FirewallPolicyRule("defaultFirewallPolicyRule", {
* firewallPolicy: defaultFirewallPolicy.id,
* description: "Example Resource",
* priority: 9000,
* enableLogging: true,
* action: "allow",
* direction: "EGRESS",
* disabled: false,
* match: {
* layer4Configs: [{
* ipProtocol: "tcp",
* ports: [
* 80,
* 8080,
* ],
* }],
* destIpRanges: ["11.100.0.1/32"],
* },
* });
* ```
*
* ## Import
*
* FirewallPolicyRule can be imported using any of these accepted formats
*
* ```sh
* $ pulumi import gcp:compute/firewallPolicyRule:FirewallPolicyRule default locations/global/firewallPolicies/{{firewall_policy}}/rules/{{priority}}
* ```
*
* ```sh
* $ pulumi import gcp:compute/firewallPolicyRule:FirewallPolicyRule default {{firewall_policy}}/{{priority}}
* ```
*/
export class FirewallPolicyRule extends pulumi.CustomResource {
/**
* Get an existing FirewallPolicyRule resource's state with the given name, ID, and optional extra
* properties used to qualify the lookup.
*
* @param name The _unique_ name of the resulting resource.
* @param id The _unique_ provider ID of the resource to lookup.
* @param state Any extra arguments used during the lookup.
* @param opts Optional settings to control the behavior of the CustomResource.
*/
public static get(name: string, id: pulumi.Input<pulumi.ID>, state?: FirewallPolicyRuleState, opts?: pulumi.CustomResourceOptions): FirewallPolicyRule {
return new FirewallPolicyRule(name, <any>state, { ...opts, id: id });
}
/** @internal */
public static readonly __pulumiType = 'gcp:compute/firewallPolicyRule:FirewallPolicyRule';
/**
* Returns true if the given object is an instance of FirewallPolicyRule. This is designed to work even
* when multiple copies of the Pulumi SDK have been loaded into the same process.
*/
public static isInstance(obj: any): obj is FirewallPolicyRule {
if (obj === undefined || obj === null) {
return false;
}
return obj['__pulumiType'] === FirewallPolicyRule.__pulumiType;
}
/**
* The Action to perform when the client connection triggers the rule. Can currently be either "allow" or "deny()" where valid values for status are 403, 404, and 502.
*/
public readonly action!: pulumi.Output<string>;
/**
* An optional description for this resource.
*/
public readonly description!: pulumi.Output<string | undefined>;
/**
* The direction in which this rule applies. Possible values: INGRESS, EGRESS
*/
public readonly direction!: pulumi.Output<string>;
/**
* Denotes whether the firewall policy rule is disabled. When set to true, the firewall policy rule is not enforced and traffic behaves as if it did not exist. If this is unspecified, the firewall policy rule will be enabled.
*/
public readonly disabled!: pulumi.Output<boolean | undefined>;
/**
* Denotes whether to enable logging for a particular rule. If logging is enabled, logs will be exported to the configured export destination in Stackdriver. Logs may be exported to BigQuery or Pub/Sub. Note: you cannot enable logging on "gotoNext" rules.
*/
public readonly enableLogging!: pulumi.Output<boolean | undefined>;
/**
* The firewall policy of the resource.
*/
public readonly firewallPolicy!: pulumi.Output<string>;
/**
* Type of the resource. Always `compute#firewallPolicyRule` for firewall policy rules
*/
public /*out*/ readonly kind!: pulumi.Output<string>;
/**
* A match condition that incoming traffic is evaluated against. If it evaluates to true, the corresponding 'action' is enforced. Structure is documented below.
*/
public readonly match!: pulumi.Output<outputs.compute.FirewallPolicyRuleMatch>;
/**
 * An integer indicating the priority of a rule in the list. The priority must be a positive value between 0 and 2147483647. Rules are evaluated from highest to lowest priority where 0 is the highest priority and 2147483647 is the lowest priority.
*/
public readonly priority!: pulumi.Output<number>;
/**
* Calculation of the complexity of a single firewall policy rule.
*/
public /*out*/ readonly ruleTupleCount!: pulumi.Output<number>;
/**
* A list of network resource URLs to which this rule applies. This field allows you to control which network's VMs get this rule. If this field is left blank, all VMs within the organization will receive the rule.
*/
public readonly targetResources!: pulumi.Output<string[] | undefined>;
/**
* A list of service accounts indicating the sets of instances that are applied with this rule.
*/
public readonly targetServiceAccounts!: pulumi.Output<string[] | undefined>;
/**
* Create a FirewallPolicyRule resource with the given unique name, arguments, and options.
*
* @param name The _unique_ name of the resource.
* @param args The arguments to use to populate this resource's properties.
* @param opts A bag of options that control this resource's behavior.
*/
    constructor(name: string, args: FirewallPolicyRuleArgs, opts?: pulumi.CustomResourceOptions)
    constructor(name: string, argsOrState?: FirewallPolicyRuleArgs | FirewallPolicyRuleState, opts?: pulumi.CustomResourceOptions) {
        let inputs: pulumi.Inputs = {};
        opts = opts || {};
        // When an explicit ID is supplied, we are looking up an existing resource's
        // state rather than creating a new one, so `argsOrState` carries State.
        if (opts.id) {
            const state = argsOrState as FirewallPolicyRuleState | undefined;
            inputs["action"] = state ? state.action : undefined;
            inputs["description"] = state ? state.description : undefined;
            inputs["direction"] = state ? state.direction : undefined;
            inputs["disabled"] = state ? state.disabled : undefined;
            inputs["enableLogging"] = state ? state.enableLogging : undefined;
            inputs["firewallPolicy"] = state ? state.firewallPolicy : undefined;
            inputs["kind"] = state ? state.kind : undefined;
            inputs["match"] = state ? state.match : undefined;
            inputs["priority"] = state ? state.priority : undefined;
            inputs["ruleTupleCount"] = state ? state.ruleTupleCount : undefined;
            inputs["targetResources"] = state ? state.targetResources : undefined;
            inputs["targetServiceAccounts"] = state ? state.targetServiceAccounts : undefined;
        } else {
            const args = argsOrState as FirewallPolicyRuleArgs | undefined;
            // Required inputs are validated up front, except when rehydrating from a
            // URN (in which case the engine supplies the values).
            if ((!args || args.action === undefined) && !opts.urn) {
                throw new Error("Missing required property 'action'");
            }
            if ((!args || args.direction === undefined) && !opts.urn) {
                throw new Error("Missing required property 'direction'");
            }
            if ((!args || args.firewallPolicy === undefined) && !opts.urn) {
                throw new Error("Missing required property 'firewallPolicy'");
            }
            if ((!args || args.match === undefined) && !opts.urn) {
                throw new Error("Missing required property 'match'");
            }
            if ((!args || args.priority === undefined) && !opts.urn) {
                throw new Error("Missing required property 'priority'");
            }
            inputs["action"] = args ? args.action : undefined;
            inputs["description"] = args ? args.description : undefined;
            inputs["direction"] = args ? args.direction : undefined;
            inputs["disabled"] = args ? args.disabled : undefined;
            inputs["enableLogging"] = args ? args.enableLogging : undefined;
            inputs["firewallPolicy"] = args ? args.firewallPolicy : undefined;
            inputs["match"] = args ? args.match : undefined;
            inputs["priority"] = args ? args.priority : undefined;
            inputs["targetResources"] = args ? args.targetResources : undefined;
            inputs["targetServiceAccounts"] = args ? args.targetServiceAccounts : undefined;
            // `kind` and `ruleTupleCount` are output-only; the provider computes them.
            inputs["kind"] = undefined /*out*/;
            inputs["ruleTupleCount"] = undefined /*out*/;
        }
        // Pin the SDK version on the options if the caller did not specify one.
        if (!opts.version) {
            opts = pulumi.mergeOptions(opts, { version: utilities.getVersion()});
        }
        super(FirewallPolicyRule.__pulumiType, name, inputs, opts);
    }
}
/**
 * Input properties used for looking up and filtering FirewallPolicyRule resources.
 */
export interface FirewallPolicyRuleState {
    /**
     * The Action to perform when the client connection triggers the rule. Can currently be either "allow" or "deny()" where valid values for status are 403, 404, and 502.
     */
    action?: pulumi.Input<string>;
    /**
     * An optional description for this resource.
     */
    description?: pulumi.Input<string>;
    /**
     * The direction in which this rule applies. Possible values: INGRESS, EGRESS
     */
    direction?: pulumi.Input<string>;
    /**
     * Denotes whether the firewall policy rule is disabled. When set to true, the firewall policy rule is not enforced and traffic behaves as if it did not exist. If this is unspecified, the firewall policy rule will be enabled.
     */
    disabled?: pulumi.Input<boolean>;
    /**
     * Denotes whether to enable logging for a particular rule. If logging is enabled, logs will be exported to the configured export destination in Stackdriver. Logs may be exported to BigQuery or Pub/Sub. Note: you cannot enable logging on "gotoNext" rules.
     */
    enableLogging?: pulumi.Input<boolean>;
    /**
     * The firewall policy of the resource.
     */
    firewallPolicy?: pulumi.Input<string>;
    /**
     * Type of the resource. Always `compute#firewallPolicyRule` for firewall policy rules. (Output-only.)
     */
    kind?: pulumi.Input<string>;
    /**
     * A match condition that incoming traffic is evaluated against. If it evaluates to true, the corresponding 'action' is enforced. Structure is documented below.
     */
    match?: pulumi.Input<inputs.compute.FirewallPolicyRuleMatch>;
    /**
     * An integer indicating the priority of a rule in the list. The priority must be a positive value between 0 and 2147483647. Rules are evaluated from highest to lowest priority where 0 is the highest priority and 2147483647 is the lowest priority.
     */
    priority?: pulumi.Input<number>;
    /**
     * Calculation of the complexity of a single firewall policy rule. (Output-only.)
     */
    ruleTupleCount?: pulumi.Input<number>;
    /**
     * A list of network resource URLs to which this rule applies. This field allows you to control which network's VMs get this rule. If this field is left blank, all VMs within the organization will receive the rule.
     */
    targetResources?: pulumi.Input<pulumi.Input<string>[]>;
    /**
     * A list of service accounts indicating the sets of instances that are applied with this rule.
     */
    targetServiceAccounts?: pulumi.Input<pulumi.Input<string>[]>;
}
/**
* The set of arguments for constructing a FirewallPolicyRule resource.
*/
export interface FirewallPolicyRuleArgs {
/**
* The Action to perform when the client connection triggers the rule. Can currently be either "allow" or "deny()" where valid values for status are 403, 404, and 502.
*/
action: pulumi.Input<string>;
/**
* An optional description for this resource.
*/
description?: pulumi.Input<string>;
/**
* The direction in which this rule applies. Possible values: INGRESS, EGRESS
*/
direction: pulumi.Input<string>;
/**
* Denotes whether the firewall policy rule is disabled. When set to true, the firewall policy rule is not enforced and traffic behaves as if it did not exist. If this is unspecified, the firewall policy rule will be enabled.
*/
disabled?: pulumi.Input<boolean>;
/**
* Denotes whether to enable logging for a particular rule. If logging is enabled, logs will be exported to the configured export destination in Stackdriver. Logs may be exported to BigQuery or Pub/Sub. Note: you cannot enable logging on "gotoNext" rules.
*/
enableLogging?: pulumi.Input<boolean>;
/**
* The firewall policy of the resource.
*/
firewallPolicy: pulumi.Input<string>;
/**
* A match condition that incoming traffic is evaluated against. If it evaluates to true, the corresponding 'action' is enforced. Structure is documented below.
*/
match: pulumi.Input<inputs.compute.FirewallPolicyRuleMatch>;
/**
* An integer indicating the priority of a rule in the list. The priority must be a positive value between 0 and 2147483647. Rules are evaluated from highest to lowest priority where 0 is the highest priority and 2147483647 is the lowest prority.
*/
priority: pulumi.Input<number>;
/**
* A list of network resource URLs to which this rule applies. This field allows you to control which network's VMs get this rule. If this field is left blank, all VMs within the organization will receive the rule.
*/
targetResources?: pulumi.Input<pulumi.Input<string>[]>;
/**
* A list of service accounts indicating the sets of instances that are applied with this rule.
*/
targetServiceAccounts?: pulumi.Input<pulumi.Input<string>[]>;
} | the_stack |
jest.unmock("@dharmaprotocol/contracts");
// Unmock the "fs-extra" package in order to give us
// access to the deployed TokenRegistry on the
// test chain.
jest.unmock("fs-extra");
// libraries
import * as moment from "moment";
import * as Web3 from "web3";
// utils
import { BigNumber } from "../../../utils/bignumber";
import { TOKEN_REGISTRY_TRACKED_TOKENS } from "../../../utils/constants";
import * as Units from "../../../utils/units";
import { Web3Utils } from "../../../utils/web3_utils";
import { ACCOUNTS } from "../../accounts";
// wrappers
import {
CollateralizedSimpleInterestTermsContractContract,
DebtKernelContract,
ERC20Contract,
RepaymentRouterContract,
} from "../../../src/wrappers";
// types
import { DEBT_ORDER_DATA_DEFAULTS, DebtOrderData, DebtRegistryEntry } from "../../../src/types";
// adapters
import {
CollateralizedSimpleInterestLoanAdapter,
CollateralizedSimpleInterestLoanOrder,
CollateralizedTermsContractParameters,
CollateralizerAdapterErrors,
} from "../../../src/adapters/collateralized_simple_interest_loan_adapter";
import { CollateralizedLoanTerms } from "../../../src/adapters/collateralized_simple_interest_loan_terms";
import {
SimpleInterestAdapterErrors,
SimpleInterestLoanAdapter,
} from "../../../src/adapters/simple_interest_loan_adapter";
import { ContractsAPI, ContractsError } from "../../../src/apis/contracts_api";
// Shared connection against a locally-running test chain (ganache/testrpc on :8545).
const provider = new Web3.providers.HttpProvider("http://localhost:8545");
const web3 = new Web3(provider);
const web3Utils = new Web3Utils(web3);
const contracts = new ContractsAPI(web3);
const collateralizedSimpleInterestLoanAdapter = new CollateralizedSimpleInterestLoanAdapter(
    web3,
    contracts,
);
const collateralizedLoanTerms = new CollateralizedLoanTerms(web3, contracts);
// Default transaction options used throughout the suite.
const TX_DEFAULTS = { from: ACCOUNTS[0].address, gas: 4712388 };
const REP_TOKEN_SYMBOL = "REP";
const ZRX_TOKEN_SYMBOL = "ZRX";
const MKR_TOKEN_SYMBOL = "MKR";
// Pairs human-readable terms parameters with their packed bytes32 encoding.
interface Scenario {
    unpackedParams: CollateralizedTermsContractParameters;
    packedParams: string;
}
describe("Collateralized Terms Contract Interface (Unit Tests)", () => {
    let snapshotId: number;
    // Each test runs against a fresh chain snapshot so on-chain state changes
    // never leak between tests.
    beforeEach(async () => {
        snapshotId = await web3Utils.saveTestSnapshot();
    });
    afterEach(async () => {
        await web3Utils.revertToSnapshot(snapshotId);
    });
    // NOTE(review): each packed string encodes (collateralTokenIndex, collateralAmount,
    // gracePeriodInDays) into one bytes32 — the final byte appears to hold the grace
    // period (0x05/0x1e/0x5a = 5/30/90); confirm against packParameters' layout.
    const scenario1: Scenario = {
        unpackedParams: {
            collateralTokenIndex: new BigNumber(0),
            collateralAmount: new BigNumber(3.5 * 10 ** 18),
            gracePeriodInDays: new BigNumber(5),
        },
        packedParams: "0x000000000000000000000000000000000000000000000030927f74c9de000005",
    };
    const scenario2: Scenario = {
        unpackedParams: {
            collateralTokenIndex: new BigNumber(1),
            collateralAmount: new BigNumber(723489020 * 10 ** 18),
            gracePeriodInDays: new BigNumber(30),
        },
        packedParams: "0x00000000000000000000000000000000000000125674c25cd7f81d067000001e",
    };
    const scenario3: Scenario = {
        unpackedParams: {
            collateralTokenIndex: new BigNumber(8),
            collateralAmount: new BigNumber(1212234234 * 10 ** 18),
            gracePeriodInDays: new BigNumber(90),
        },
        packedParams: "0x0000000000000000000000000000000000000083eabc9580d20c1abba800005a",
    };
describe("#packParameters", () => {
describe("...with invalid collateral token index", () => {
test("should throw INVALID_TOKEN_INDEX error", () => {
const invalidCollateralTokenIndex = new BigNumber(300);
expect(() => {
collateralizedLoanTerms.packParameters({
...scenario1.unpackedParams,
collateralTokenIndex: invalidCollateralTokenIndex,
});
}).toThrow(
CollateralizerAdapterErrors.INVALID_TOKEN_INDEX(invalidCollateralTokenIndex),
);
});
});
describe("...with collateral token index that is not tracked", () => {
test("should throw INVALID_TOKEN_INDEX error", () => {
const invalidCollateralTokenIndex = new BigNumber(
TOKEN_REGISTRY_TRACKED_TOKENS.length,
);
expect(() => {
collateralizedLoanTerms.packParameters({
...scenario1.unpackedParams,
collateralTokenIndex: invalidCollateralTokenIndex,
});
}).toThrow(
CollateralizerAdapterErrors.INVALID_TOKEN_INDEX(invalidCollateralTokenIndex),
);
});
});
describe("...with collateral amount > 2^92 - 1", () => {
test("should throw COLLATERAL_AMOUNT_EXCEEDS_MAXIMUM error", () => {
expect(() => {
collateralizedLoanTerms.packParameters({
...scenario1.unpackedParams,
collateralAmount: new BigNumber(3.5 * 10 ** 38),
});
}).toThrow(CollateralizerAdapterErrors.COLLATERAL_AMOUNT_EXCEEDS_MAXIMUM());
});
});
describe("...with collateral amount < 0", () => {
test("should throw COLLATERAL_AMOUNT_MUST_BE_POSITIVE error", () => {
expect(() => {
collateralizedLoanTerms.packParameters({
...scenario1.unpackedParams,
collateralAmount: new BigNumber(0),
});
}).toThrow(CollateralizerAdapterErrors.COLLATERAL_AMOUNT_MUST_BE_POSITIVE());
});
});
describe("...with collateral amount = 0", () => {
test("should throw COLLATERAL_AMOUNT_MUST_BE_POSITIVE error", () => {
expect(() => {
collateralizedLoanTerms.packParameters({
...scenario1.unpackedParams,
collateralAmount: new BigNumber(-1),
});
}).toThrow(CollateralizerAdapterErrors.COLLATERAL_AMOUNT_MUST_BE_POSITIVE());
});
});
describe("...with collateral amount containing decimals", () => {
test("should throw INVALID_DECIMAL_VALUE error", () => {
expect(() => {
collateralizedLoanTerms.packParameters({
...scenario1.unpackedParams,
collateralAmount: new BigNumber(100.4567),
});
}).toThrowError(CollateralizerAdapterErrors.INVALID_DECIMAL_VALUE());
});
});
describe("...with grace period in days < 0", () => {
test("should throw GRACE_PERIOD_IS_NEGATIVE error", () => {
expect(() => {
collateralizedLoanTerms.packParameters({
...scenario1.unpackedParams,
gracePeriodInDays: new BigNumber(-1),
});
}).toThrowError(CollateralizerAdapterErrors.GRACE_PERIOD_IS_NEGATIVE());
});
});
describe("...with grace period in days > 255", () => {
test("should throw GRACE_PERIOD_EXCEEDS_MAXIMUM error", () => {
expect(() => {
collateralizedLoanTerms.packParameters({
...scenario1.unpackedParams,
gracePeriodInDays: new BigNumber(256),
});
}).toThrowError(CollateralizerAdapterErrors.GRACE_PERIOD_EXCEEDS_MAXIMUM());
});
});
describe("...with grace period containing decimals", () => {
test("should throw INVALID_DECIMAL_VALUE error", () => {
expect(() => {
collateralizedLoanTerms.packParameters({
...scenario1.unpackedParams,
gracePeriodInDays: new BigNumber(1.567),
});
}).toThrowError(CollateralizerAdapterErrors.INVALID_DECIMAL_VALUE());
});
});
describe("...with valid collateral token index, collateral amount, and grace period in days", () => {
describe("Scenario #1", () => {
test("should return correctly packed parameters", () => {
expect(
collateralizedLoanTerms.packParameters(scenario1.unpackedParams),
).toEqual(scenario1.packedParams);
});
});
describe("Scenario #2", () => {
test("should return correctly packed parameters", () => {
expect(
collateralizedLoanTerms.packParameters(scenario2.unpackedParams),
).toEqual(scenario2.packedParams);
});
});
describe("Scenario #3", () => {
test("should return correctly packed parameters", () => {
expect(
collateralizedLoanTerms.packParameters(scenario3.unpackedParams),
).toEqual(scenario3.packedParams);
});
});
});
});
describe("#unpackParameters", () => {
describe("...with value that has too few bytes", () => {
const termsContractParameters = "0x" + "f".repeat(63);
test("should throw INVALID_PACKED_PARAMETERS error", () => {
expect(() => {
collateralizedLoanTerms.unpackParameters(termsContractParameters);
}).toThrowError(/Expected packedParams to conform to schema \/Bytes32/);
});
});
describe("...with value that has too many bytes", () => {
const termsContractParameters = "0x" + "f".repeat(65);
test("should throw INVALID_PACKED_PARAMETERS error", () => {
expect(() => {
collateralizedLoanTerms.unpackParameters(termsContractParameters);
}).toThrowError(/Expected packedParams to conform to schema \/Bytes32/);
});
});
describe("...with value that includes non-hexadecimal characters", () => {
const termsContractParameters = "0x" + "z".repeat(64);
test("should throw INVALID_PACKED_PARAMETERS error", () => {
expect(() => {
collateralizedLoanTerms.unpackParameters(termsContractParameters);
}).toThrowError(/Expected packedParams to conform to schema \/Bytes32/);
});
});
});
describe("...with valid termsContractParameters string", () => {
describe("Scenario #1", () => {
test("should return correctly unpacked parameters", () => {
expect(collateralizedLoanTerms.unpackParameters(scenario1.packedParams)).toEqual(
scenario1.unpackedParams,
);
});
});
describe("Scenario #2", () => {
test("should return correctly unpacked parameters", () => {
expect(collateralizedLoanTerms.unpackParameters(scenario2.packedParams)).toEqual(
scenario2.unpackedParams,
);
});
});
describe("Scenario #3", () => {
test("should return correctly unpacked parameters", () => {
expect(collateralizedLoanTerms.unpackParameters(scenario3.packedParams)).toEqual(
scenario3.unpackedParams,
);
});
});
});
});
describe("Collateralized Simple Interest Loan Adapter (Unit Tests)", () => {
interface AdapterScenario {
debtOrderData: DebtOrderData;
fullLoanOrder: CollateralizedSimpleInterestLoanOrder;
minimalLoanOrder: CollateralizedSimpleInterestLoanOrder;
entry: DebtRegistryEntry;
}
let scenario1: AdapterScenario;
let scenario2: AdapterScenario;
let scenario3: AdapterScenario;
beforeAll(async () => {
const debtKernel = await DebtKernelContract.deployed(web3, TX_DEFAULTS);
const repaymentRouter = await RepaymentRouterContract.deployed(web3, TX_DEFAULTS);
const termsContract = await contracts.loadCollateralizedSimpleInterestTermsContract(
TX_DEFAULTS,
);
const tokenSymbols = await Promise.all(
[0, 1, 2].map((index) => contracts.getTokenSymbolByIndexAsync(new BigNumber(index))),
);
const tokenAddresses = await Promise.all(
tokenSymbols.map((symbol) => contracts.getTokenAddressBySymbolAsync(symbol)),
);
const principalAmountForScenario1 = new BigNumber(1000 * 10 ** 18);
const principalAmountForScenario2 = new BigNumber(12 * 10 ** 18);
const principalAmountForScenario3 = new BigNumber(50 * 10 ** 18);
const debtOrderDataBase = {
...DEBT_ORDER_DATA_DEFAULTS,
kernelVersion: debtKernel.address,
issuanceVersion: repaymentRouter.address,
termsContract: termsContract.address,
};
const debtOrderDataForScenario1 = {
...debtOrderDataBase,
principalAmount: principalAmountForScenario1,
principalToken: tokenAddresses[0],
termsContractParameters:
"0x000000003635c9adc5dea000000003e8300020200000008ac7230489e800005a",
};
const debtOrderDataForScenario2 = {
...debtOrderDataBase,
principalAmount: principalAmountForScenario2,
principalToken: tokenAddresses[1],
termsContractParameters:
"0x0100000000a688906bd8b000000004b0400030000000004563918244f4000078",
};
const debtOrderDataForScenario3 = {
...debtOrderDataBase,
principalAmount: principalAmountForScenario3,
principalToken: tokenAddresses[2],
termsContractParameters:
"0x0200000002b5e3af16b18800000007d02000a010000001bc16d674ec8000000a",
};
const loanOrderParamsForScenario1 = {
principalTokenSymbol: tokenSymbols[0],
principalAmount: principalAmountForScenario1,
interestRate: new BigNumber(0.1),
amortizationUnit: SimpleInterestLoanAdapter.Installments.MONTHLY,
termLength: new BigNumber(2),
collateralTokenSymbol: tokenSymbols[2],
collateralAmount: new BigNumber(10 * 10 ** 18),
gracePeriodInDays: new BigNumber(90),
};
const loanOrderParamsForScenario2 = {
principalTokenSymbol: tokenSymbols[1],
principalAmount: principalAmountForScenario2,
interestRate: new BigNumber(0.12),
amortizationUnit: SimpleInterestLoanAdapter.Installments.YEARLY,
termLength: new BigNumber(3),
collateralTokenSymbol: tokenSymbols[0],
collateralAmount: new BigNumber(5 * 10 ** 18),
gracePeriodInDays: new BigNumber(120),
};
const loanOrderParamsForScenario3 = {
principalTokenSymbol: tokenSymbols[2],
principalAmount: principalAmountForScenario3,
interestRate: new BigNumber(0.2),
amortizationUnit: SimpleInterestLoanAdapter.Installments.WEEKLY,
termLength: new BigNumber(10),
collateralTokenSymbol: tokenSymbols[1],
collateralAmount: new BigNumber(32 * 10 ** 18),
gracePeriodInDays: new BigNumber(10),
};
const debtRegistryEntryBase = {
version: repaymentRouter.address,
beneficiary: ACCOUNTS[0].address,
underwriter: ACCOUNTS[1].address,
underwriterRiskRating: Units.percent(0.1),
termsContract: termsContract.address,
issuanceBlockTimestamp: new BigNumber(moment().unix()),
};
scenario1 = {
debtOrderData: debtOrderDataForScenario1,
fullLoanOrder: {
...debtOrderDataForScenario1,
...loanOrderParamsForScenario1,
},
minimalLoanOrder: loanOrderParamsForScenario1,
entry: {
...debtRegistryEntryBase,
termsContractParameters: debtOrderDataForScenario1.termsContractParameters,
},
};
scenario2 = {
debtOrderData: debtOrderDataForScenario2,
fullLoanOrder: {
...debtOrderDataForScenario2,
...loanOrderParamsForScenario2,
},
minimalLoanOrder: loanOrderParamsForScenario2,
entry: {
...debtRegistryEntryBase,
termsContractParameters: debtOrderDataForScenario2.termsContractParameters,
},
};
scenario3 = {
debtOrderData: debtOrderDataForScenario3,
fullLoanOrder: {
...debtOrderDataForScenario3,
...loanOrderParamsForScenario3,
},
minimalLoanOrder: loanOrderParamsForScenario3,
entry: {
...debtRegistryEntryBase,
termsContractParameters: debtOrderDataForScenario3.termsContractParameters,
},
};
});
describe("#toDebtOrder", () => {
describe("collateralized simple interest loan's required parameter is missing or malformed", () => {
describe("`collateralTokenSymbol` is missing", () => {
test("should throw DOES_NOT_CONFORM_TO_SCHEMA", async () => {
await expect(
collateralizedSimpleInterestLoanAdapter.toDebtOrder({
...scenario1.minimalLoanOrder,
collateralTokenSymbol: undefined,
}),
).rejects.toThrow('instance requires property "collateralTokenSymbol"');
});
});
describe("`collateralTokenSymbol` is not tracked by Token Registry", () => {
test("should throw CANNOT_FIND_TOKEN_WITH_SYMBOL", async () => {
await expect(
collateralizedSimpleInterestLoanAdapter.toDebtOrder({
...scenario1.minimalLoanOrder,
collateralTokenSymbol: "XXX", // XXX is not tracked in our test env's registry
}),
).rejects.toThrow(ContractsError.CANNOT_FIND_TOKEN_WITH_SYMBOL("XXX"));
});
});
describe("`collateralAmount` is missing", async () => {
test("should throw DOES_NOT_CONFORM_TO_SCHEMA", async () => {
await expect(
collateralizedSimpleInterestLoanAdapter.toDebtOrder({
...scenario1.minimalLoanOrder,
collateralAmount: undefined,
}),
).rejects.toThrow('instance requires property "collateralAmount"');
});
});
describe("`gracePeriodInDays` is missing", async () => {
test("should throw DOES_NOT_CONFORM_TO_SCHEMA", async () => {
await expect(
collateralizedSimpleInterestLoanAdapter.toDebtOrder({
...scenario1.minimalLoanOrder,
gracePeriodInDays: undefined,
}),
).rejects.toThrow('instance requires property "gracePeriodInDays"');
});
});
});
describe("collateralized simple interest loan's required parameters are present and well-formed ", () => {
describe("Scenario #1", () => {
test("should return debt order with correctly packed values", async () => {
await expect(
collateralizedSimpleInterestLoanAdapter.toDebtOrder(
scenario1.minimalLoanOrder,
),
).resolves.toEqual(scenario1.debtOrderData);
});
});
describe("Scenario #2", () => {
test("should return debt order with correctly packed values", async () => {
await expect(
collateralizedSimpleInterestLoanAdapter.toDebtOrder(
scenario2.minimalLoanOrder,
),
).resolves.toEqual(scenario2.debtOrderData);
});
});
describe("Scenario #3", () => {
test("should return debt order with correctly packed values", async () => {
await expect(
collateralizedSimpleInterestLoanAdapter.toDebtOrder(
scenario3.minimalLoanOrder,
),
).resolves.toEqual(scenario3.debtOrderData);
});
});
});
});
describe("#fromDebtOrder()", () => {
describe("argument does not conform to the DebtOrderWithTermsSpecified schema", () => {
describe("malformed terms contract", () => {
test("should throw DOES_NOT_CONFORM_TO_SCHEMA", async () => {
await expect(
collateralizedSimpleInterestLoanAdapter.fromDebtOrder({
...scenario1.debtOrderData,
termsContract: "invalid terms contract",
}),
).rejects.toThrow("instance.termsContract does not match pattern");
});
});
describe("missing termsContract", () => {
test("should throw DOES_NOT_CONFORM_TO_SCHEMA", async () => {
await expect(
collateralizedSimpleInterestLoanAdapter.fromDebtOrder({
...scenario1.debtOrderData,
termsContract: undefined,
}),
).rejects.toThrow('instance requires property "termsContract"');
});
});
describe("missing termsContractParameters", () => {
test("should throw DOES_NOT_CONFORM_TO_SCHEMA", async () => {
await expect(
collateralizedSimpleInterestLoanAdapter.fromDebtOrder({
...scenario1.debtOrderData,
termsContractParameters: undefined,
}),
).rejects.toThrow('instance requires property "termsContractParameters"');
});
});
describe("missing principalAmount", async () => {
test("should throw DOES_NOT_CONFORM_TO_SCHEMA", async () => {
await expect(
collateralizedSimpleInterestLoanAdapter.fromDebtOrder({
...scenario1.debtOrderData,
principalAmount: undefined,
}),
).rejects.toThrow('instance requires property "principalAmount"');
});
});
describe("missing principalToken", async () => {
test("should throw DOES_NOT_CONFORM_TO_SCHEMA", async () => {
await expect(
collateralizedSimpleInterestLoanAdapter.fromDebtOrder({
...scenario1.debtOrderData,
principalToken: undefined,
}),
).rejects.toThrow('instance requires property "principalToken"');
});
});
});
describe("terms contract does not match principal token's associated `CollateralizedSimpleInterestTermsContract`", () => {
test("should throw MISMATCHED_TOKEN_SYMBOL", async () => {
const principalTokenSymbol = await contracts.getTokenSymbolByIndexAsync(
new BigNumber(1),
);
await expect(
collateralizedSimpleInterestLoanAdapter.fromDebtOrder({
...scenario1.debtOrderData,
// the principal token index is encoded as 1 instead of 0.
termsContractParameters:
"0x010000003635c9adc5dea000000003e8300020200000008ac7230489e800005a",
}),
).rejects.toThrow(
CollateralizerAdapterErrors.MISMATCHED_TOKEN_SYMBOL(
scenario1.debtOrderData.principalToken,
principalTokenSymbol,
),
);
});
});
describe("terms contract params contains token index out of bounds", () => {
test("should throw CANNOT_FIND_TOKEN_WITH_INDEX", async () => {
await expect(
collateralizedSimpleInterestLoanAdapter.fromDebtOrder({
...scenario1.debtOrderData,
// the principal token index is encoded as 255, which does not map to any
// token listed in our `TokenRegistry`
termsContractParameters:
"0xff0000003635c9adc5dea000000003e8300020200000008ac7230489e800005a",
}),
).rejects.toThrow(ContractsError.CANNOT_FIND_TOKEN_WITH_INDEX(255));
});
});
describe("amortization specified in termsContractParameters is of invalid type", () => {
it("should throw INVALID_AMORTIZATION_UNIT_TYPE", async () => {
await expect(
collateralizedSimpleInterestLoanAdapter.fromDebtOrder({
...scenario1.debtOrderData,
// The amortization unit is encoded as 6 (which is invalid) instead of 3.
termsContractParameters:
"0x000000003635c9adc5dea000000003e8600020200000008ac7230489e800005a",
}),
).rejects.toThrow(SimpleInterestAdapterErrors.INVALID_AMORTIZATION_UNIT_TYPE());
});
});
describe("debt order is valid and well-formed", () => {
describe("Scenario #1", () => {
test("should return `CollateralizedSimpleInterestLoanOrder` with correctly unpacked values", async () => {
await expect(
collateralizedSimpleInterestLoanAdapter.fromDebtOrder(
scenario1.debtOrderData,
),
).resolves.toEqual(scenario1.fullLoanOrder);
});
});
describe("Scenario #2", () => {
test("should return `CollateralizedSimpleInterestLoanOrder` with correctly unpacked values", async () => {
await expect(
collateralizedSimpleInterestLoanAdapter.fromDebtOrder(
scenario2.debtOrderData,
),
).resolves.toEqual(scenario2.fullLoanOrder);
});
});
describe("Scenario #3", () => {
test("should return `CollateralizedSimpleInterestLoanOrder` with correctly unpacked values", async () => {
await expect(
collateralizedSimpleInterestLoanAdapter.fromDebtOrder(
scenario3.debtOrderData,
),
).resolves.toEqual(scenario3.fullLoanOrder);
});
});
});
});
describe("#fromDebtRegistryEntry", () => {
describe("no principal token tracked at that index", () => {
it("should throw CANNOT_FIND_TOKEN_WITH_INDEX", async () => {
await expect(
collateralizedSimpleInterestLoanAdapter.fromDebtRegistryEntry({
...scenario1.entry,
// Our test environment does not track a token at index 255 (which is packed
// into the first byte of the parameters)
termsContractParameters:
"0xff000000000de0b6b3a764000000057820002000000000000000000000000000",
}),
).rejects.toThrow(ContractsError.CANNOT_FIND_TOKEN_WITH_INDEX(255));
});
});
describe("refers to incorrect terms contract", () => {
test("should throw MISMATCHED_TERMS_CONTRACT", async () => {
// We choose an arbitrary address to represent
// a different terms contract's address.
const INVALID_ADDRESS = ACCOUNTS[3].address;
await expect(
collateralizedSimpleInterestLoanAdapter.fromDebtRegistryEntry({
...scenario1.entry,
termsContract: INVALID_ADDRESS,
}),
).rejects.toThrow(
CollateralizerAdapterErrors.MISMATCHED_TERMS_CONTRACT(INVALID_ADDRESS),
);
});
});
describe("entry parameters are valid", () => {
describe("Scenario #1:", () => {
test("should return correct collateralized simple interest loan order", async () => {
await expect(
collateralizedSimpleInterestLoanAdapter.fromDebtRegistryEntry(
scenario1.entry,
),
).resolves.toEqual(scenario1.minimalLoanOrder);
});
});
describe("Scenario #2:", () => {
test("should return correct collateralized simple interest loan order", async () => {
await expect(
collateralizedSimpleInterestLoanAdapter.fromDebtRegistryEntry(
scenario2.entry,
),
).resolves.toEqual(scenario2.minimalLoanOrder);
});
});
describe("Scenario #3:", () => {
test("should return correct collateralized simple interest loan order", async () => {
await expect(
collateralizedSimpleInterestLoanAdapter.fromDebtRegistryEntry(
scenario3.entry,
),
).resolves.toEqual(scenario3.minimalLoanOrder);
});
});
});
});
});
import * as Common from '../../core/common/common.js';
import * as SDK from '../../core/sdk/sdk.js';
import * as Protocol from '../../generated/protocol.js';
import type {LayerPaintEvent} from './TimelineFrameModel.js';
/**
 * A layer tree reconstructed from trace events rather than from live CDP calls.
 * Tiles and paint events recorded in the trace are attached to the layers so
 * that raster work can later be replayed through the PaintProfiler.
 */
export class TracingLayerTree extends SDK.LayerTreeBase.LayerTreeBase {
  // Maps a trace tile id (keyed with the "cc::Tile/" prefix) to its payload.
  private tileById: Map<string, TracingLayerTile>;
  private paintProfilerModel: SDK.PaintProfiler.PaintProfilerModel|null;
  constructor(target: SDK.Target.Target|null) {
    super(target);
    this.tileById = new Map();
    // May be null when there is no target (e.g. loading a saved trace).
    this.paintProfilerModel = target && target.model(SDK.PaintProfiler.PaintProfilerModel);
  }
  /**
   * Rebuilds the tree from trace payloads. Exactly one of `root` (legacy
   * hierarchical format) or `layers` (flat list) is expected to be non-null.
   * Backend node ids referenced by the payloads are resolved to DOM nodes
   * before the layers are (re)created, and `paints` are attached afterwards.
   */
  async setLayers(root: TracingLayerPayload|null, layers: TracingLayerPayload[]|null, paints: LayerPaintEvent[]):
      Promise<void> {
    const idsToResolve = new Set<Protocol.DOM.BackendNodeId>();
    if (root) {
      // This is a legacy code path for compatibility, as cc is removing
      // layer tree hierarchy, this code will eventually be removed.
      this.extractNodeIdsToResolve(idsToResolve, {}, root);
    } else if (layers) {
      for (let i = 0; i < layers.length; ++i) {
        this.extractNodeIdsToResolve(idsToResolve, {}, layers[i]);
      }
    }
    await this.resolveBackendNodeIds(idsToResolve);
    // Keep the previous id->layer map so innerSetLayers can recycle layer
    // objects across updates instead of recreating them.
    const oldLayersById = this.layersById;
    this.layersById = new Map();
    this.setContentRoot(null);
    if (root) {
      const convertedLayers = this.innerSetLayers(oldLayersById, root);
      this.setRoot(convertedLayers);
    } else if (layers) {
      const processedLayers = layers.map(this.innerSetLayers.bind(this, oldLayersById));
      // innerSetLayers sets the content root as a side effect (first layer
      // that draws content); the flat format requires one to exist.
      const contentRoot = this.contentRoot();
      if (!contentRoot) {
        throw new Error('Content root is not set.');
      }
      this.setRoot(contentRoot);
      // All remaining layers are parented under the content root.
      for (let i = 0; i < processedLayers.length; ++i) {
        if (processedLayers[i].id() !== contentRoot.id()) {
          contentRoot.addChild(processedLayers[i]);
        }
      }
    }
    this.setPaints(paints);
  }
  /** Replaces the tile index with the given set of trace tiles. */
  setTiles(tiles: TracingLayerTile[]): void {
    this.tileById = new Map();
    for (const tile of tiles) {
      this.tileById.set(tile.id, tile);
    }
  }
  /**
   * Resolves a raster tile id to a paint-profiler snapshot of the picture
   * covering that tile's content rect. Resolves to null (after logging to the
   * console) when the tile or its owning layer is unknown.
   */
  pictureForRasterTile(tileId: string): Promise<SDK.PaintProfiler.SnapshotWithRect|null> {
    // Trace tile ids are stored with the "cc::Tile/" object-name prefix.
    const tile = this.tileById.get('cc::Tile/' + tileId);
    if (!tile) {
      Common.Console.Console.instance().error(`Tile ${tileId} is missing`);
      return Promise.resolve(null) as Promise<SDK.PaintProfiler.SnapshotWithRect|null>;
    }
    const layer = (this.layerById(tile.layer_id) as TracingLayer | null);
    if (!layer) {
      Common.Console.Console.instance().error(`Layer ${tile.layer_id} for tile ${tileId} is not found`);
      return Promise.resolve(null) as Promise<SDK.PaintProfiler.SnapshotWithRect|null>;
    }
    return layer.pictureForRect(tile.content_rect);
  }
  // Attaches each paint event to its owning layer; paints for unknown layers
  // are silently dropped.
  private setPaints(paints: LayerPaintEvent[]): void {
    for (let i = 0; i < paints.length; ++i) {
      const layer = (this.layersById.get(paints[i].layerId()) as TracingLayer | null);
      if (layer) {
        layer.addPaintEvent(paints[i]);
      }
    }
  }
  // Converts one payload (and, in the legacy format, its children) into a
  // TracingLayer, reusing the previous update's layer object when possible.
  private innerSetLayers(oldLayersById: Map<string|number, SDK.LayerTreeBase.Layer>, payload: TracingLayerPayload):
      TracingLayer {
    let layer = (oldLayersById.get(payload.layer_id) as TracingLayer | null);
    if (layer) {
      layer.reset(payload);
    } else {
      layer = new TracingLayer(this.paintProfilerModel, payload);
    }
    this.layersById.set(payload.layer_id, layer);
    if (payload.owner_node) {
      layer.setNode(this.backendNodeIdToNode().get(payload.owner_node) || null);
    }
    // The first layer encountered that draws content becomes the content root.
    if (!this.contentRoot() && layer.drawsContent()) {
      this.setContentRoot(layer);
    }
    for (let i = 0; payload.children && i < payload.children.length; ++i) {
      layer.addChild(this.innerSetLayers(oldLayersById, payload.children[i]));
    }
    return layer;
  }
  // Collects backend node ids (from this payload and, legacy format, its
  // children) that are not yet resolved to DOM nodes.
  // NOTE(review): `seenNodeIds` is threaded through the recursion but never
  // read or written here — it appears to be vestigial; confirm before removal.
  private extractNodeIdsToResolve(
      nodeIdsToResolve: Set<Protocol.DOM.BackendNodeId>, seenNodeIds: Object, payload: TracingLayerPayload): void {
    const backendNodeId = payload.owner_node;
    if (backendNodeId && !this.backendNodeIdToNode().has(backendNodeId)) {
      nodeIdsToResolve.add(backendNodeId);
    }
    for (let i = 0; payload.children && i < payload.children.length; ++i) {
      this.extractNodeIdsToResolve(nodeIdsToResolve, seenNodeIds, payload.children[i]);
    }
  }
}
/**
 * A single compositing layer reconstructed from a TracingLayerPayload.
 * Implements the SDK Layer interface so trace-based trees can be rendered by
 * the same UI as live layer trees. Instances are reused across snapshots via
 * reset().
 */
export class TracingLayer implements SDK.LayerTreeBase.Layer {
  private parentLayerId: string|null;
  private parentInternal: SDK.LayerTreeBase.Layer|null;
  private layerId: string;
  private nodeInternal: SDK.DOMModel.DOMNode|null;
  private offsetXInternal: number;
  private offsetYInternal: number;
  private widthInternal: number;
  private heightInternal: number;
  private childrenInternal: SDK.LayerTreeBase.Layer[];
  private quadInternal: number[];
  private scrollRectsInternal: Protocol.LayerTree.ScrollRect[];
  private gpuMemoryUsageInternal: number;
  private paints: LayerPaintEvent[];
  private compositingReasonIds: string[];
  private drawsContentInternal: boolean;
  private paintProfilerModel: SDK.PaintProfiler.PaintProfilerModel|null;
  constructor(paintProfilerModel: SDK.PaintProfiler.PaintProfilerModel|null, payload: TracingLayerPayload) {
    this.parentLayerId = null;
    this.parentInternal = null;
    this.layerId = '';
    this.nodeInternal = null;
    this.offsetXInternal = -1;
    this.offsetYInternal = -1;
    this.widthInternal = -1;
    this.heightInternal = -1;
    this.childrenInternal = [];
    this.quadInternal = [];
    this.scrollRectsInternal = [];
    this.gpuMemoryUsageInternal = -1;
    this.paints = [];
    this.compositingReasonIds = [];
    this.drawsContentInternal = false;
    this.paintProfilerModel = paintProfilerModel;
    this.reset(payload);
  }
  /**
   * Re-initializes this layer from a fresh payload, clearing parent links,
   * children and accumulated paint events.
   */
  reset(payload: TracingLayerPayload): void {
    this.nodeInternal = null;
    this.layerId = String(payload.layer_id);
    this.offsetXInternal = payload.position[0];
    this.offsetYInternal = payload.position[1];
    this.widthInternal = payload.bounds.width;
    this.heightInternal = payload.bounds.height;
    this.childrenInternal = [];
    this.parentLayerId = null;
    this.parentInternal = null;
    this.quadInternal = payload.layer_quad || [];
    this.createScrollRects(payload);
    // Keep payload.compositing_reasons as a default
    // but use the newer payload.debug_info.compositing_reasons
    // if the first one is not set.
    this.compositingReasonIds =
        payload.compositing_reason_ids || (payload.debug_info && payload.debug_info.compositing_reason_ids) || [];
    this.drawsContentInternal = Boolean(payload.draws_content);
    this.gpuMemoryUsageInternal = payload.gpu_memory_usage;
    /** @type {!Array<!LayerPaintEvent>} */
    this.paints = [];
  }
  id(): string {
    return this.layerId;
  }
  parentId(): string|null {
    return this.parentLayerId;
  }
  parent(): SDK.LayerTreeBase.Layer|null {
    return this.parentInternal;
  }
  isRoot(): boolean {
    return !this.parentId();
  }
  children(): SDK.LayerTreeBase.Layer[] {
    return this.childrenInternal;
  }
  /** Appends a child and sets its back-pointers; asserts if it is already parented. */
  addChild(childParam: SDK.LayerTreeBase.Layer): void {
    const child = (childParam as TracingLayer);
    if (child.parentInternal) {
      console.assert(false, 'Child already has a parent');
    }
    this.childrenInternal.push(child);
    child.parentInternal = this;
    child.parentLayerId = this.layerId;
  }
  setNode(node: SDK.DOMModel.DOMNode|null): void {
    this.nodeInternal = node;
  }
  node(): SDK.DOMModel.DOMNode|null {
    return this.nodeInternal;
  }
  /** Walks up the ancestor chain and returns the first layer's DOM node, if any. */
  nodeForSelfOrAncestor(): SDK.DOMModel.DOMNode|null {
    let layer: (SDK.LayerTreeBase.Layer|null)|this = this;
    for (; layer; layer = layer.parent()) {
      if (layer.node()) {
        return layer.node();
      }
    }
    return null;
  }
  offsetX(): number {
    return this.offsetXInternal;
  }
  offsetY(): number {
    return this.offsetYInternal;
  }
  width(): number {
    return this.widthInternal;
  }
  height(): number {
    return this.heightInternal;
  }
  // Transform data is not available from traces.
  transform(): number[]|null {
    return null;
  }
  quad(): number[] {
    return this.quadInternal;
  }
  // Fixed anchor at the layer center (traces carry no anchor information).
  anchorPoint(): number[] {
    return [0.5, 0.5, 0];
  }
  invisible(): boolean {
    return false;
  }
  // Paint counts are not tracked for trace-based layers.
  paintCount(): number {
    return 0;
  }
  lastPaintRect(): Protocol.DOM.Rect|null {
    return null;
  }
  scrollRects(): Protocol.LayerTree.ScrollRect[] {
    return this.scrollRectsInternal;
  }
  stickyPositionConstraint(): SDK.LayerTreeBase.StickyPositionConstraint|null {
    // TODO(smcgruer): Provide sticky layer information in traces.
    return null;
  }
  gpuMemoryUsage(): number {
    return this.gpuMemoryUsageInternal;
  }
  /**
   * Resolves each paint event to a {rect, snapshot} pair; individual entries
   * resolve to null when the paint has no snapshot.
   * Snapshot rects are [x, y, width, height] arrays.
   */
  snapshots(): Promise<SDK.PaintProfiler.SnapshotWithRect|null>[] {
    return this.paints.map(paint => paint.snapshotPromise().then(snapshot => {
      if (!snapshot) {
        return null;
      }
      const rect = {x: snapshot.rect[0], y: snapshot.rect[1], width: snapshot.rect[2], height: snapshot.rect[3]};
      return {rect: rect, snapshot: snapshot.snapshot};
    }));
  }
  /**
   * Builds a combined snapshot for the given target rect ([x, y, w, h] in
   * layer content coordinates) by loading all picture fragments that overlap
   * it. Resolves to null when no fragments overlap or no profiler model is
   * available.
   */
  pictureForRect(targetRect: number[]): Promise<SDK.PaintProfiler.SnapshotWithRect|null> {
    return Promise.all(this.paints.map(paint => paint.picturePromise())).then(pictures => {
      const filteredPictures = (pictures.filter(picture => picture && rectsOverlap(picture.rect, targetRect)) as {
        rect: Array<number>,
        serializedPicture: string,
      }[]);
      const fragments = filteredPictures.map(
          picture => ({x: picture.rect[0], y: picture.rect[1], picture: picture.serializedPicture}));
      if (!fragments.length || !this.paintProfilerModel) {
        return null;
      }
      const x0 = fragments.reduce((min, item) => Math.min(min, item.x), Infinity);
      const y0 = fragments.reduce((min, item) => Math.min(min, item.y), Infinity);
      // Rect is in layer content coordinates, make it relative to picture by offsetting to the top left corner.
      const rect = {x: targetRect[0] - x0, y: targetRect[1] - y0, width: targetRect[2], height: targetRect[3]};
      return this.paintProfilerModel.loadSnapshotFromFragments(fragments).then(
          snapshot => snapshot ? {rect: rect, snapshot: snapshot} : null);
    });
    // 1-D interval overlap with exclusive boundaries (touching rects do not overlap).
    function segmentsOverlap(a1: number, a2: number, b1: number, b2: number): boolean {
      console.assert(a1 <= a2 && b1 <= b2, 'segments should be specified as ordered pairs');
      return a2 > b1 && a1 < b2;
    }
    // Rects are [x, y, width, height]; overlap requires overlap on both axes.
    function rectsOverlap(a: number[], b: number[]): boolean {
      return segmentsOverlap(a[0], a[0] + a[2], b[0], b[0] + b[2]) &&
          segmentsOverlap(a[1], a[1] + a[3], b[1], b[1] + b[3]);
    }
  }
  /** Converts a [x, y, w, h] params array into a protocol ScrollRect of the given type. */
  private scrollRectsFromParams(params: number[], type: Protocol.LayerTree.ScrollRectType):
      Protocol.LayerTree.ScrollRect {
    return {rect: {x: params[0], y: params[1], width: params[2], height: params[3]}, type: type};
  }
  /** Populates scrollRectsInternal from whichever handler regions the payload carries. */
  private createScrollRects(payload: TracingLayerPayload): void {
    const nonPayloadScrollRects: Protocol.LayerTree.ScrollRect[] = [];
    if (payload.non_fast_scrollable_region) {
      nonPayloadScrollRects.push(this.scrollRectsFromParams(
          payload.non_fast_scrollable_region, 'NonFastScrollable' as Protocol.LayerTree.ScrollRectType));
    }
    if (payload.touch_event_handler_region) {
      nonPayloadScrollRects.push(this.scrollRectsFromParams(
          payload.touch_event_handler_region, Protocol.LayerTree.ScrollRectType.TouchEventHandler));
    }
    if (payload.wheel_event_handler_region) {
      nonPayloadScrollRects.push(this.scrollRectsFromParams(
          payload.wheel_event_handler_region, Protocol.LayerTree.ScrollRectType.WheelEventHandler));
    }
    if (payload.scroll_event_handler_region) {
      nonPayloadScrollRects.push(this.scrollRectsFromParams(
          payload.scroll_event_handler_region, Protocol.LayerTree.ScrollRectType.RepaintsOnScroll));
    }
    // SDK.LayerBaseTree.Layer.ScrollRectType and Protocol.LayerTree.ScrollRectType are the
    // same type, but we need to use the indirection of the nonPayloadScrollRects since
    // the ScrollRectType is defined as a string in SDK.LayerBaseTree.Layer.ScrollRectType.
    this.scrollRectsInternal = nonPayloadScrollRects;
  }
  addPaintEvent(paint: LayerPaintEvent): void {
    this.paints.push(paint);
  }
  requestCompositingReasonIds(): Promise<string[]> {
    return Promise.resolve(this.compositingReasonIds);
  }
  drawsContent(): boolean {
    return this.drawsContentInternal;
  }
}
/**
 * Raw per-layer record as emitted in trace events (snake_case fields come
 * straight from the cc/compositor trace format).
 */
export interface TracingLayerPayload {
  bounds: {height: number, width: number};
  children: TracingLayerPayload[];
  layer_id: number;
  position: number[];
  scroll_offset: number[];
  // Quad as a flat coordinate array.
  layer_quad: number[];
  // Truthy when the layer draws content (trace encodes this as a number).
  draws_content: number;
  gpu_memory_usage: number;
  transform: number[];
  owner_node: Protocol.DOM.BackendNodeId;
  reasons: string[];
  compositing_reason: string[];
  compositing_reason_ids: string[];
  // Newer traces nest the reason ids under debug_info.
  debug_info: {compositing_reason_ids: string[]};
  // Handler regions as [x, y, width, height] arrays.
  non_fast_scrollable_region: number[];
  touch_event_handler_region: number[];
  wheel_event_handler_region: number[];
  scroll_event_handler_region: number[];
}
export interface TracingLayerTile {
id: string;
layer_id: string;
gpu_memory_usage: number;
content_rect: number[];
} | the_stack |
import { IPoint, IBBox, Item, BubblesetCfg } from '../../types';
import {
squareDist,
pointLineSquareDist,
itemIntersectByLine,
getPointsCenter,
fractionToLine,
isPointsOverlap,
pointRectSquareDist,
Line,
isPointInPolygon,
} from '../../util/math';
// Default tuning parameters for the bubbleset contour algorithm.
// Distances are in pixels; r0 is the full-influence radius, r1 the
// zero-influence radius for the energy field.
const defaultOps = {
  maxRoutingIterations: 100, // number of times to run the algorithm to refine the path finding in difficult areas
  maxMarchingIterations: 100, // number of times to refine the boundary
  pixelGroupSize: 2, // the resolution of the algorithm in square pixels
  edgeR0: 10, // the distance from edges at which energy is 1 (full influence)
  edgeR1: 10, // the distance from edges at which energy is 0 (no influence)
  nodeR0: 5, // the distance from nodes which energy is 1 (full influence)
  nodeR1: 10, // the distance from nodes at which energy is 0 (no influence)
  morphBuffer: 5, // DEFAULT_NODE_R0; the amount of space to move the virtual edge when wrapping around obstacles
  threshold: 0.001, // energy threshold for the marching-squares contour
  skip: 16, // keep every skip-th contour point when simplifying the hull
  nodeInfluenceFactor: 1, // weight of member nodes in the energy field
  edgeInfluenceFactor: 1, // weight of virtual edges in the energy field
  negativeNodeInfluenceFactor: -0.5, // repulsive weight of non-member nodes
};
/**
 * Marching squares algorithm for tracing the contour of a pixel group
* https://www.emanueleferonato.com/2013/03/01/using-marching-squares-algorithm-to-trace-the-contour-of-an-image/
* @param potentialArea
* @param threshold
*/
function MarchingSquares(contour, potentialArea, threshold) {
  // Set to true once a closed contour has been traced.
  let marched = false;
  // Energy value of the cell at grid position (x, y).
  const getVal = (x: number, y: number) => {
    return potentialArea.cells[x + y * potentialArea.width];
  };
  // Classic marching-squares state: a 4-bit mask built from the four cells
  // around corner (x, y); each bit is set when that cell is above threshold.
  // NOTE(review): the first corner uses `>=` while the other three use `>` —
  // looks unintentional; confirm against the reference implementation.
  const getState = (x: number, y: number) => {
    let squareVal = 0;
    if (getVal(x - 1, y - 1) >= threshold) {
      squareVal += 1;
    }
    if (getVal(x, y - 1) > threshold) {
      squareVal += 2;
    }
    if (getVal(x - 1, y) > threshold) {
      squareVal += 4;
    }
    if (getVal(x, y) > threshold) {
      squareVal += 8;
    }
    return squareVal;
  };
  // Walks the boundary starting at (xPos, yPos), appending visited grid
  // points to `contour`. Returns true when the walk closes back on itself
  // (or bails out); returns undefined when the iteration budget is exhausted.
  const doMarch = (xPos: number, yPos: number) => {
    let x = xPos;
    let y = yPos;
    let prevX;
    let prevY;
    for (let i = 0; i < potentialArea.width * potentialArea.height; i++) {
      prevX = x;
      prevY = y;
      if (contour.findIndex((item) => item.x === x && item.y === y) > -1) {
        if (contour[0].x !== x || contour[0].y !== y) {
          // encountered a loop but haven't returned to start: change direction using conditionals and continue back to start
        } else {
          // Returned to the starting point: the contour is closed.
          return true;
        }
      } else {
        contour.push({ x, y });
      }
      const state = getState(x, y);
      // assign the move direction according to state of the square
      switch (state) {
        case -1:
          console.warn('Marched out of bounds');
          return true;
        case 0:
        case 3:
        case 2:
        case 7:
          x++; // go right
          break;
        case 12:
        case 14:
        case 4:
          x--; // go left
          break;
        case 6: // go left if come from up else go right
          if (prevX === 0) {
            if (prevY === -1) {
              x -= 1;
            } else {
              x += 1;
            }
          }
          break;
        case 1:
        case 13:
        case 5:
          y--; // go up
          break;
        case 9: // go up if come from right else go down
          if (prevX === 1) {
            if (prevY === 0) {
              y -= 1;
            } else {
              y += 1;
            }
          }
          break;
        case 10:
        case 8:
        case 11:
          y++; // go down
          break;
        default:
          console.warn(`Marching squares invalid state: ${state}`);
          return true;
      }
    }
  };
  // Scans the grid for the first above-threshold, non-interior cell and
  // traces the contour from there.
  this.march = () => {
    for (let x = 0; x < potentialArea.width && !marched; x += 1) {
      for (let y = 0; y < potentialArea.height && !marched; y += 1) {
        if (getVal(x, y) > threshold && getState(x, y) !== 15) {
          marched = doMarch(x, y);
        }
      }
    }
    return marched;
  };
}
/**
* Space partition & assign value to each cell
* @param points
*/
/**
 * Partitions the active region into a grid of pixel groups and allocates a
 * zero-filled energy cell per group.
 * @param width  region width in pixels
 * @param height region height in pixels
 * @param pixelGroupSize algorithm resolution in square pixels
 * @returns the cell buffer plus the scaled grid dimensions
 */
const initGridCells = (width: number, height: number, pixelGroupSize: number) => {
  const cols = Math.ceil(width / pixelGroupSize);
  const rows = Math.ceil(height / pixelGroupSize);
  const cellCount = Math.max(0, cols * rows);
  const cellBuffer = new Float32Array(cellCount).fill(0);
  return {
    cells: cellBuffer,
    width: cols,
    height: rows,
  };
};
/**
* Find the optimal already visited member to item;
Optimal: minimize cost(j) = distance(i,j) ∗ countObstacles(i,j)
* @param item
* @param visited
*/
const pickBestNeighbor = (item: Item, visited: Item[], nonMembers: Item[]): Item | null => {
  let closestNeighbour = null;
  let minCost = Number.POSITIVE_INFINITY;
  visited.forEach((neighbourItem) => {
    const itemP = { x: item.getModel().x, y: item.getModel().y };
    const neighbourItemP = { x: neighbourItem.getModel().x, y: neighbourItem.getModel().y };
    const dist = squareDist(itemP, neighbourItemP);
    const directLine = new Line(itemP.x, itemP.y, neighbourItemP.x, neighbourItemP.y);
    // Count the non-members whose bounding shape the direct line passes through.
    const numberObstacles = nonMembers.reduce((count, _item) => {
      if (fractionToLine(_item, directLine) > 0) {
        return count + 1;
      }
      return count;
    }, 0);
    // cost = squared distance * (obstacles + 1)^2, i.e. obstacles are
    // penalized quadratically so a slightly longer clear path wins.
    if (dist * (numberObstacles + 1) ** 2 < minCost) {
      closestNeighbour = neighbourItem;
      minCost = dist * (numberObstacles + 1) ** 2;
    }
  });
  return closestNeighbour;
};
/**
 * Of the items intersected by the line, returns the one closest to the
 * line's start point (smallest intersection fraction), or null when the
 * line crosses no item.
 * @param items candidate obstacles
 * @param line  the line to test against
 */
const getIntersectItem = (items: Item[], line: Line): Item | null => {
  let minDistance = Number.POSITIVE_INFINITY;
  let closestItem = null;
  items.forEach((item) => {
    const distance = fractionToLine(item, line);
    // find closest intersection
    if (distance >= 0 && distance < minDistance) {
      closestItem = item;
      minDistance = distance;
    }
  });
  return closestItem;
};
/**
* Modify the directLine and Route virtual edges around obstacles
*/
const computeRoute = (
  directLine: Line,
  nonMembers: Item[],
  maxRoutingIterations: number,
  morphBuffer: number,
): Line[] => {
  // Lines whose path has been confirmed obstacle-free (or given up on).
  const checkedLines: Line[] = [];
  // Work queue of line segments still to be tested against obstacles.
  const linesToCheck: Line[] = [];
  linesToCheck.push(directLine);
  let hasIntersection = true;
  let iterations = 0;
  // True when `point` coincides (within tolerance) with an endpoint of any
  // of the given lines — used to avoid re-creating identical reroute points.
  const pointExists = (point: IPoint, lines: Line[]) => {
    let flag = false;
    lines.forEach((line) => {
      if (flag) return;
      if (
        isPointsOverlap(point, { x: line.x1, y: line.y1 }) ||
        isPointsOverlap(point, { x: line.x2, y: line.y2 })
      ) {
        flag = true;
      }
    });
    return flag;
  };
  // True when `point` falls inside the bounding box of any non-member item.
  const isPointInNonMembers = (point: IPoint, _nonMembers: Item[]) => {
    for (const item of _nonMembers) {
      const bbox = item.getBBox();
      const itemContour = [
        [bbox.x, bbox.y],
        [bbox.x + bbox.width, bbox.y],
        [bbox.x, bbox.y + bbox.height],
        [bbox.x + bbox.width, bbox.y + bbox.height],
      ];
      if (isPointInPolygon(itemContour, point.x, point.y)) {
        return true;
      }
    }
    return false;
  };
  // outer loop end when no more intersections or out of iterations
  while (hasIntersection && iterations < maxRoutingIterations) {
    hasIntersection = false;
    // inner loop end when out of lines or found an intersection
    while (!hasIntersection && linesToCheck.length) {
      const line = linesToCheck.pop();
      const closestItem = getIntersectItem(nonMembers, line);
      if (closestItem) {
        const [intersections, countIntersections] = itemIntersectByLine(closestItem, line);
        // if line passes through item
        if (countIntersections === 2) {
          // Tries to detour the line around `closestItem` via a virtual
          // corner point; isFirst selects which corner candidate to try.
          const testReroute = (isFirst: boolean) => {
            let tempMorphBuffer = morphBuffer;
            let virtualNode = rerouteLine(closestItem, tempMorphBuffer, intersections, isFirst);
            // test the virtualNode already exists
            let exist =
              pointExists(virtualNode, linesToCheck) || pointExists(virtualNode, checkedLines);
            let pointInside = isPointInNonMembers(virtualNode, nonMembers);
            while (!exist && pointInside && tempMorphBuffer >= 1) {
              // try a smaller buffer
              tempMorphBuffer /= 1.5;
              virtualNode = rerouteLine(closestItem, tempMorphBuffer, intersections, isFirst);
              exist =
                pointExists(virtualNode, linesToCheck) || pointExists(virtualNode, checkedLines);
              pointInside = isPointInNonMembers(virtualNode, nonMembers);
            }
            // On the second reroute attempt, the point is allowed to lie
            // inside a non-member (pointInside is not required to be false).
            if (virtualNode && !exist && (!isFirst || !pointInside)) {
              // add 2 rerouted lines to check
              linesToCheck.push(new Line(line.x1, line.y1, virtualNode.x, virtualNode.y));
              linesToCheck.push(new Line(virtualNode.x, virtualNode.y, line.x2, line.y2));
              hasIntersection = true;
            }
          };
          testReroute(true);
          if (!hasIntersection) {
            // if we didn't find a valid point around the first corner, try the second
            testReroute(false);
          }
        }
      }
      // no intersection found, mark this line as completed
      if (!hasIntersection) {
        checkedLines.push(line);
      }
      iterations += 1;
    }
  }
  // Flush any lines that were never checked (iteration budget exhausted).
  while (linesToCheck.length) {
    checkedLines.push(linesToCheck.pop());
  }
  return checkedLines;
};
/**
* Connect item with visited members using direct line or virtual edges
*/
function getRoute(
  item: Item,
  nonMembers: Item[],
  visited: Item[],
  maxRoutingIterations: number,
  morphBuffer: number,
) {
  // Connect `item` to the cheapest already-visited member; no visited
  // members means no route is needed yet.
  const optimalNeighbor = pickBestNeighbor(item, visited, nonMembers);
  if (optimalNeighbor === null) {
    return [];
  }
  // merge the consecutive lines
  const mergeLines = (checkedLines: Line[]): Line[] => {
    const finalRoute: Line[] = [];
    while (checkedLines.length > 0) {
      const line1 = checkedLines.pop()!;
      if (checkedLines.length === 0) {
        finalRoute.push(line1);
        break;
      }
      const line2 = checkedLines.pop()!;
      const mergeLine = new Line(line1.x1, line1.y1, line2.x2, line2.y2);
      const closestItem = getIntersectItem(nonMembers, mergeLine);
      // merge most recent line and previous line
      if (!closestItem) {
        // Merged line is obstacle-free: keep it and try to merge further.
        checkedLines.push(mergeLine);
      } else {
        // Merge would hit an obstacle: commit line1, retry with line2.
        finalRoute.push(line1);
        checkedLines.push(line2);
      }
    }
    return finalRoute;
  };
  const directLine = new Line(
    item.getModel().x,
    item.getModel().y,
    optimalNeighbor.getModel().x,
    optimalNeighbor.getModel().y,
  );
  const checkedLines = computeRoute(directLine, nonMembers, maxRoutingIterations, morphBuffer);
  const finalRoute = mergeLines(checkedLines);
  return finalRoute;
}
/**
* Calculate the countor that includes the selected items and exclues the non-selected items
* @param graph
* @param members
* @param nonMembers
* @param options
*/
/**
 * Calculates a contour (hull) that encloses the selected items while routing
 * around the non-selected items.
 * @param members    items to wrap inside the bubble
 * @param nonMembers items the contour should avoid
 * @param ops        optional overrides for the default bubbleset parameters
 * @returns an array of {x, y} hull points (possibly empty)
 */
export const genBubbleSet = (members: Item[], nonMembers: Item[], ops?: BubblesetCfg) => {
  // eslint-disable-next-line no-redeclare
  // Bug fix: was Object.assign(defaultOps, ops), which mutated the shared
  // defaultOps object. The loop below further mutates threshold/skip and the
  // influence factors, so defaults were permanently corrupted after the first
  // call. Copying into a fresh object keeps each invocation independent.
  const options = Object.assign({}, defaultOps, ops);
  const centroid = getPointsCenter(
    members.map((item) => ({ x: item.getModel().x, y: item.getModel().y })),
  );
  // Sort members by squared distance from the centroid, closest first.
  members = members.sort(
    (a, b) =>
      squareDist({ x: a.getModel().x, y: a.getModel().y }, centroid) -
      squareDist({ x: b.getModel().x, y: b.getModel().y }, centroid),
  );
  const visited: Item[] = [];
  const virtualEdges: Line[] = [];
  members.forEach((item) => {
    const lines = getRoute(
      item,
      nonMembers,
      visited,
      options.maxRoutingIterations,
      options.morphBuffer,
    );
    lines.forEach((l) => {
      virtualEdges.push(l);
    });
    visited.push(item);
  });
  // Edges can already be passed in as members/nonMembers, so genBubbleSet does
  // not take an explicit edges parameter for now.
  // edges && edges.forEach(e => {
  //   virtualEdges.push(new Line(e.getSource().getModel().x, e.getSource().getModel().y, e.getTarget().getModel().x, e.getTarget().getModel().y));
  // });
  const activeRegion = getActiveRregion(members, virtualEdges, options.nodeR0);
  const potentialArea = initGridCells(
    activeRegion.width,
    activeRegion.height,
    options.pixelGroupSize,
  );
  // Use marching squares to generate the contour, refining the energy field
  // and threshold on each iteration until the hull contains all members.
  let contour = [];
  let hull = [];
  for (let iterations = 0; iterations < options.maxMarchingIterations; iterations++) {
    fillPotentialArea(members, nonMembers, virtualEdges, activeRegion, potentialArea, options);
    contour = [];
    hull = [];
    if (!new MarchingSquares(contour, potentialArea, options.threshold).march()) continue;
    // Convert grid coordinates back to pixel coordinates.
    const marchedPath = contour.map((point) => ({
      x: Math.round(point.x * options.pixelGroupSize + activeRegion.minX),
      y: Math.round(point.y * options.pixelGroupSize + activeRegion.minY),
    }));
    if (marchedPath) {
      let size = marchedPath.length;
      if (options.skip > 1) {
        size = Math.floor(marchedPath.length / options.skip);
        // if we reduced too much (fewer than three points in reduced surface) reduce skip and try again
        while (size < 3 && options.skip > 1) {
          options.skip -= 1;
          size = Math.floor(marchedPath.length / options.skip);
        }
      }
      // copy hull values
      for (let i = 0, j = 0; j < size; j += 1, i += options.skip) {
        hull.push({ x: marchedPath[i].x, y: marchedPath[i].y });
      }
    }
    // A hull is valid when every member's bbox center lies inside it.
    const isContourValid = () => {
      for (const item of members) {
        const hullPoints = hull.map((point) => [point.x, point.y]);
        if (!isPointInPolygon(hullPoints, item.getBBox().centerX, item.getBBox().centerY))
          return false;
      }
      // Non-members are not strictly required to be excluded from the hull.
      return true;
    };
    if (hull && isContourValid()) {
      return hull;
    }
    // Update parameters for the next iteration: lower the threshold and
    // gradually rebalance the influence factors.
    // NOTE(review): memberInfluenceFactor / nonMemberInfluenceFactor are not
    // declared in defaultOps (nodeInfluenceFactor / negativeNodeInfluenceFactor
    // are); these multiplications start from undefined and yield NaN —
    // presumably a naming mismatch. Left unchanged to preserve behavior;
    // confirm against the reference implementation before renaming.
    options.threshold *= 0.9;
    if (iterations <= options.maxMarchingIterations * 0.5) {
      options.memberInfluenceFactor *= 1.2;
      options.edgeInfluenceFactor *= 1.2;
    } else if (options.nonMemberInfluenceFactor !== 0 && nonMembers.length > 0) {
      // after half the iterations, start increasing positive energy and lowering the threshold
      options.nonMemberInfluenceFactor *= 0.8;
    } else {
      break;
    }
  }
  return hull;
};
/**
 * Computes the union bounding box of all member items and virtual edges,
 * padded by `offset` on every side.
 * Bug fix: the original subtracted/added `offset` inside the per-bbox loop,
 * so the padding accumulated once per bounding box (N boxes => N*offset of
 * padding) instead of being applied a single time. The min/max are now
 * accumulated first and the padding applied once afterwards.
 * @param members member items (their getBBox() results are included)
 * @param edges   virtual edges (their getBBox() results are included)
 * @param offset  padding in pixels applied once around the union box
 */
function getActiveRregion(members: Item[], edges: Line[], offset: number): IBBox {
  const activeRegion = {
    minX: Number.POSITIVE_INFINITY,
    minY: Number.POSITIVE_INFINITY,
    maxX: Number.NEGATIVE_INFINITY,
    maxY: Number.NEGATIVE_INFINITY,
    width: 0,
    height: 0,
    x: 0,
    y: 0,
  };
  const bboxes = [];
  members.forEach((item) => {
    bboxes.push(item.getBBox());
  });
  edges.forEach((l) => {
    bboxes.push(l.getBBox());
  });
  // Accumulate the raw union first...
  for (const bbox of bboxes) {
    activeRegion.minX = Math.min(activeRegion.minX, bbox.minX);
    activeRegion.minY = Math.min(activeRegion.minY, bbox.minY);
    activeRegion.maxX = Math.max(activeRegion.maxX, bbox.maxX);
    activeRegion.maxY = Math.max(activeRegion.maxY, bbox.maxY);
  }
  // ...then pad by offset exactly once on each side.
  activeRegion.minX -= offset;
  activeRegion.minY -= offset;
  activeRegion.maxX += offset;
  activeRegion.maxY += offset;
  activeRegion.width = activeRegion.maxX - activeRegion.minX;
  activeRegion.height = activeRegion.maxY - activeRegion.minY;
  activeRegion.x = activeRegion.minX;
  activeRegion.y = activeRegion.minY;
  return activeRegion;
}
/**
 * Fills the potential-area grid with the energy contributions of member
 * nodes (positive), virtual edges (positive) and non-member nodes (negative).
 * Bug fix: in addItemInfluence the negative-influence skip read
 * `potentialArea[x + y * potentialArea.width]` instead of
 * `potentialArea.cells[...]` (compare the identical check in
 * addEdgeInfluence). The indexed property on the wrapper object is always
 * undefined, so the `<= 0` guard never evaluated correctly.
 * @param members       items contributing positive energy
 * @param nonMembers    items contributing negative (repulsive) energy
 * @param edges         virtual edges contributing positive energy
 * @param activeRegion  pixel-space bounding box of the computation
 * @param potentialArea grid of energy cells (see initGridCells)
 * @param options       bubbleset parameters (r0/r1 radii, factors, ...)
 */
function fillPotentialArea(
  members: Item[],
  nonMembers: Item[],
  edges: Line[],
  activeRegion: IBBox,
  potentialArea,
  options: BubblesetCfg,
) {
  // Pixel coordinate -> grid index (clamped at 0).
  function pos2GridIx(x, offset) {
    const gridIx = Math.floor((x - offset) / options.pixelGroupSize);
    return gridIx < 0 ? 0 : gridIx;
  }
  // Grid index -> pixel coordinate.
  function gridIx2Pos(x, offset) {
    return x * options.pixelGroupSize + offset;
  }
  // using inverse a for numerical stability
  const nodeInfA = (options.nodeR0 - options.nodeR1) * (options.nodeR0 - options.nodeR1);
  const edgeInfA = (options.edgeR0 - options.edgeR1) * (options.edgeR0 - options.edgeR1);
  // Grid-index rectangle [startX, startY, endX, endY] that a bbox (expanded
  // by thresholdR) can influence, clamped to the grid dimensions.
  const getAffectedRegion = (bbox, thresholdR) => {
    const startX = Math.min(
      pos2GridIx(bbox.minX, thresholdR + activeRegion.minX),
      potentialArea.width,
    );
    const startY = Math.min(
      pos2GridIx(bbox.minY, thresholdR + activeRegion.minY),
      potentialArea.height,
    );
    const endX = Math.min(
      pos2GridIx(bbox.maxX, -thresholdR + activeRegion.minX),
      potentialArea.width,
    );
    const endY = Math.min(
      pos2GridIx(bbox.maxY, -thresholdR + activeRegion.minY),
      potentialArea.height,
    );
    return [startX, startY, endX, endY];
  };
  // Adds (r - r1)^2-shaped influence of one item to every affected cell.
  const addItemInfluence = (item: Item, influenceFactor: number) => {
    const bbox = item.getBBox();
    const [startX, startY, endX, endY] = getAffectedRegion(bbox, options.nodeR1);
    // calculate item influence for each cell
    for (let y = startY; y < endY; y += 1) {
      for (let x = startX; x < endX; x += 1) {
        // Negative influence only pushes back where energy is already positive.
        // (Fixed: read .cells, matching addEdgeInfluence below.)
        if (influenceFactor < 0 && potentialArea.cells[x + y * potentialArea.width] <= 0) {
          continue;
        }
        const tempX = gridIx2Pos(x, activeRegion.minX);
        const tempY = gridIx2Pos(y, activeRegion.minY);
        const distanceSq = pointRectSquareDist(
          { x: tempX, y: tempY },
          { x: bbox.minX, y: bbox.minY, width: bbox.width, height: bbox.height },
        );
        if (distanceSq < options.nodeR1 ** 2) {
          const dr = Math.sqrt(distanceSq) - options.nodeR1;
          potentialArea.cells[x + y * potentialArea.width] += influenceFactor * dr * dr;
        }
      }
    }
  };
  // Adds (r - r1)^2-shaped influence of one virtual edge to every affected cell.
  const addEdgeInfluence = (line: Line, influenceFactor: number) => {
    const bbox = line.getBBox();
    const [startX, startY, endX, endY] = getAffectedRegion(bbox, options.edgeR1);
    // for every point in active part of potentialArea, calculate distance to nearest point on line and add influence
    for (let y = startY; y < endY; y += 1) {
      for (let x = startX; x < endX; x += 1) {
        if (influenceFactor < 0 && potentialArea.cells[x + y * potentialArea.width] <= 0) {
          continue;
        }
        const tempX = gridIx2Pos(x, activeRegion.minX);
        const tempY = gridIx2Pos(y, activeRegion.minY);
        const minDistanceSq = pointLineSquareDist({ x: tempX, y: tempY }, line);
        // only influence if less than r1
        if (minDistanceSq < options.edgeR1 ** 2) {
          const mdr = Math.sqrt(minDistanceSq) - options.edgeR1;
          potentialArea.cells[x + y * potentialArea.width] += influenceFactor * mdr * mdr;
        }
      }
    }
  };
  if (options.nodeInfluenceFactor) {
    members.forEach((item) => {
      addItemInfluence(item, options.nodeInfluenceFactor / nodeInfA);
    });
  }
  if (options.edgeInfluenceFactor) {
    edges.forEach((edge) => {
      addEdgeInfluence(edge, options.edgeInfluenceFactor / edgeInfA);
    });
  }
  if (options.negativeNodeInfluenceFactor) {
    nonMembers.forEach((item) => {
      addItemInfluence(item, options.negativeNodeInfluenceFactor / nodeInfA);
    });
  }
}
// Picks a virtual routing point just outside one corner of the item's
// bounding box, chosen from how the line intersects the box. `wrapNormal`
// selects which side of the obstacle to wrap around (near corner vs. the
// diagonally opposite one).
function rerouteLine(item, buffer: number, intersections: IPoint[], wrapNormal: boolean): IPoint {
  const bbox = item.getBBox();
  const [topIntersect, leftIntersect, bottomIntersect, rightIntersect] = intersections;
  // The four bbox corners pushed outward by `buffer`.
  const cornerPos = {
    topLeft: { x: bbox.minX - buffer, y: bbox.minY - buffer },
    topRight: { x: bbox.maxX + buffer, y: bbox.minY - buffer },
    bottomLeft: { x: bbox.minX - buffer, y: bbox.maxY + buffer },
    bottomRight: { x: bbox.maxX + buffer, y: bbox.maxY + buffer },
  };
  const totalArea = bbox.height * bbox.width;
  // Trapezoid area between two intersection points on opposite sides.
  function calcHalfArea(intersect1, intersect2) {
    return bbox.width * ((intersect1.y - bbox.minY + (intersect2.y - bbox.minY)) * 0.5);
  }
  // Choose the control point based on how the line crosses the bounding box.
  if (leftIntersect) {
    // The clipped region is a triangle (line enters a corner region).
    if (topIntersect) return wrapNormal ? cornerPos.topLeft : cornerPos.bottomRight;
    if (bottomIntersect) return wrapNormal ? cornerPos.bottomLeft : cornerPos.topRight;
    // The line splits the box into top/bottom trapezoids; compare their areas.
    const topArea = calcHalfArea(leftIntersect, rightIntersect);
    if (topArea < totalArea * 0.5) {
      if (leftIntersect.y > rightIntersect.y)
        return wrapNormal ? cornerPos.topLeft : cornerPos.bottomRight;
      return wrapNormal ? cornerPos.topRight : cornerPos.bottomLeft;
    }
    if (leftIntersect.y < rightIntersect.y)
      return wrapNormal ? cornerPos.bottomLeft : cornerPos.topRight;
    return wrapNormal ? cornerPos.bottomRight : cornerPos.topLeft;
  }
  if (rightIntersect) {
    if (topIntersect) return wrapNormal ? cornerPos.topRight : cornerPos.bottomLeft;
    if (bottomIntersect) return wrapNormal ? cornerPos.bottomRight : cornerPos.topLeft;
  }
  // The line splits the box into left/right trapezoids; compare their areas.
  const leftArea = calcHalfArea(topIntersect, bottomIntersect);
  if (leftArea < totalArea * 0.5) {
    if (topIntersect.x > bottomIntersect.x)
      return wrapNormal ? cornerPos.topLeft : cornerPos.bottomRight;
    return wrapNormal ? cornerPos.bottomLeft : cornerPos.topRight;
  }
  if (topIntersect.x < bottomIntersect.x)
    return wrapNormal ? cornerPos.topRight : cornerPos.bottomLeft;
  return wrapNormal ? cornerPos.bottomRight : cornerPos.topLeft;
}
declare var Windows;
module CorsicaTests {
"use strict";
var glob;
var DatePicker = <typeof WinJS.UI.PrivateDatePicker> WinJS.UI.DatePicker;
// Runs WinJS declarative control processing over the given DOM subtree,
// returning the processAll promise.
function process(root) {
    return WinJS.UI.processAll(root);
}
// Detects whether the WinRT `Windows` namespace is available on the global
// object; when it is, caches Windows.Globalization into the module-level
// `glob` variable as a side effect.
function isWinRTEnabled() {
    var winRTAvailable = Boolean(window && window['Windows']);
    if (winRTAvailable) {
        glob = Windows.Globalization;
    }
    return winRTAvailable;
}
    // Last picker element created, kept so teardown code can remove it.
    var elementToBeRemoved;
    // Creates a DatePicker div, appends it to the document body, applies the
    // given options (declaratively when `current` is a string, imperatively
    // when it is a Date), processes it with WinJS and resolves with the
    // element once the deferred UI has been built.
    function createPickerWithAppend(options?) {
        var dateObject = null;
        if (options && options.current) {
            // NOTE: If the 'current' property is a string, then we want to
            // set the property declaratively. If it is a date object, we
            // set it imperatively. This is because stringifying the date
            // object and reparsing it yields different results on different
            // browser implementations.
            if (typeof options.current === "string") {
                options.current = options.current + " 12:00pm";
            } else {
                // Pin the time to noon to avoid DST/midnight edge cases,
                // and set it imperatively after processing.
                dateObject = options.current;
                dateObject.setHours(12);
                delete options.current;
            }
        }
        var dp = document.createElement('div');
        elementToBeRemoved = dp;
        document.body.appendChild(dp);
        dp.setAttribute('data-win-control', 'WinJS.UI.DatePicker');
        if (options) {
            dp.setAttribute('data-win-options', JSON.stringify(options));
        }
        // NOTE: The datetime UI is created in a deferred UI manner so
        // we need to have the timeout() to allow the browser to go through
        // a few cycles before returning the object.
        return process(dp).then(function () {
            return WinJS.Promise.timeout().then(function () {
                if (dateObject) {
                    dp.winControl.current = dateObject;
                }
                return dp;
            });
        });
    }
    // Returns the select element containing the day component.
    function dateElement(picker) {
        return picker.querySelector('.win-datepicker .win-datepicker-date');
    }
    // Returns the select element containing the month component.
    function monthElement(picker) {
        return picker.querySelector('.win-datepicker .win-datepicker-month');
    }
    // Returns the select element containing the year component.
    // If the YEAR component is hidden, this returns null.
    function yearElement(picker) {
        return picker.querySelector('.win-datepicker .win-datepicker-year');
    }
function dateToString(date) {
var result = "";
if ('month' in date) result += "m=" + date.month.toString() + " ";
if ('day' in date) result += "d=" + date.day.toString() + " ";
if ('year' in date) result += "y=" + date.year.toString() + " ";
return result;
}
    // Asserts that the picker's select controls display the expected date.
    // The date object can contain values for 'day', 'month', 'year'. If any
    // of these values is not present, the function expects querySelector to
    // return null for that cell.
    function verifyDate(picker, date) {
        // 'March' and '03' (day) by default.
        // >>> more clear to use discrete strings to compare in each test case
        LiveUnit.LoggingCore.logComment("picker.winControl.current=" + picker.winControl.current + "; expected=" + dateToString(date));
        if ('day' in date) {
            // `>> 0` coerces both sides to integers before comparing.
            LiveUnit.Assert.areEqual(date.day.toString() >> 0, dateElement(picker).value >> 0);
        } else {
            LiveUnit.Assert.areEqual(null, dateElement(picker).value);
        }
        if ('month' in date) {
            // Months are 1-based in the expectation; selectedIndex is 0-based.
            LiveUnit.Assert.areEqual(date.month.toString() >> 0, monthElement(picker).selectedIndex + 1);
        }
        if ('year' in date) {
            LiveUnit.Assert.areEqual(date.year.toString(), yearElement(picker).value);
        } else {
            LiveUnit.Assert.areEqual(null, yearElement(picker).value);
        }
    }
// Report an unexpected exception/rejection via a failing assertion, but
// swallow the assertion's own throw so the surrounding promise chain can
// continue on to cleanup.
function unhandledTestError(message) {
    try {
        LiveUnit.Assert.fail("unhandled test exception: " + message);
    } catch (ex) {
        // deliberately not rethrown
    }
}
// Return the display text of the currently selected option of a <select>.
function getText(selectElement) {
    var selected = selectElement.options[selectElement.selectedIndex];
    return selected.text;
}
// Backend trackers mirrored by the set*/checkValues helpers below
// (monthBackEnd is 0-based, Date-style).
var dateBackEnd, monthBackEnd, yearBackEnd;
// The picker DOM element under test; assigned by each test after creation.
var datePicker;
// Change-event handler: assert the control's current date agrees with the
// day/month/year values tracked by the test.
function checkValues(e) {
    var current = new Date(datePicker.winControl.current);
    LiveUnit.Assert.areEqual(dateBackEnd, current.getDate(), "The backend date object has a wrong day value");
    LiveUnit.Assert.areEqual(monthBackEnd, current.getMonth(), "The backend date object has a wrong month value");
    LiveUnit.Assert.areEqual(yearBackEnd, current.getFullYear(), "The backend date object has a wrong year value");
}
// Subscribe checkValues to the picker's change event and return an
// unsubscribe function.
// BUGFIX: the original cleanup called picker.removeListener(picker, checkValues),
// but DOM elements have no removeListener method, so the returned function
// would throw and the handler was never detached. removeEventListener must be
// called with the same event name and handler used to subscribe.
function addChangeEvent(picker) {
    picker.addEventListener("change", checkValues);
    return function () { picker.removeEventListener("change", checkValues); };
}
// Number of days in month m (0-based) of year y. Uses the WinRT Calendar
// when available; otherwise day 0 of the following month gives the last
// day of this one (noon avoids DST edge effects).
function daysInMonth(y, m) {
    if (!isWinRTEnabled()) {
        return new Date(y, m + 1, 0, 12, 0).getDate();
    }
    var cal = new Windows.Globalization.Calendar();
    cal.month = m + 1;
    cal.year = y;
    return cal.numberOfDaysInThisMonth;
}
// Last selectable day value for month m (0-based) of year y.
// NOTE(review): in the WinRT branch this adds firstDayInThisMonth - 1 (so it can
// exceed the month's day count), while the non-WinRT fallback is identical to
// daysInMonth — confirm the asymmetry is intended.
function lastDayInMonth(y, m) {
if (isWinRTEnabled()) {
var c = new Windows.Globalization.Calendar();
c.month = m + 1;
c.year = y;
return c.numberOfDaysInThisMonth + c.firstDayInThisMonth - 1;
}
// day 0 of the next month == last day of this month; noon avoids DST edges
return new Date(y, m + 1, 0, 12, 0).getDate();
}
//
// TODO: consider removing the notFire boolean.
// Keeping setMonth/setYear/setDate as three separate functions (rather than one
// combined setter) lets tests mimic the user's step-by-step selection scenario.
//
// Select month m (1-based) in the UI and update the backend bookkeeping,
// clamping the tracked day when the new month is shorter. Fires a synthetic
// change event unless notFire is set.
function setMonth(picker, m, notFire = false) {
    var monthSelect = monthElement(picker);
    monthBackEnd = m - 1;
    monthSelect.selectedIndex = monthBackEnd;
    var lastDay = lastDayInMonth(yearBackEnd, monthBackEnd);
    if (dateBackEnd > lastDay) {
        dateBackEnd = lastDay;
    }
    if (!notFire) {
        fireOnchange(monthSelect);
    }
}
// Select year y in the UI and update the backend bookkeeping, clamping the
// tracked day when the current month is shorter in the new year. Fires a
// synthetic change event unless notFire is set.
function setYear(picker, y, notFire = false) {
    var yearSelect = yearElement(picker);
    yearBackEnd = y;
    yearSelect.value = yearBackEnd;
    var dayCount = daysInMonth(yearBackEnd, monthBackEnd);
    if (dateBackEnd > dayCount) {
        dateBackEnd = dayCount;
    }
    if (!notFire) {
        fireOnchange(yearSelect);
    }
}
// Select day d (1-based) in the UI and record it in the backend tracker.
// Fires a synthetic change event unless notFire is set.
function setDate(picker, d, notFire = false) {
    var daySelect = dateElement(picker);
    dateBackEnd = d;
    daySelect.selectedIndex = d - 1;
    if (!notFire) {
        fireOnchange(daySelect);
    }
}
// Seed the backend trackers from today's date.
function setValues() {
    var now = new Date();
    dateBackEnd = now.getDate();
    monthBackEnd = now.getMonth();
    yearBackEnd = now.getFullYear();
}
// Dispose and remove the picker created for the current test and restore
// the DatePicker.getInformation hook appropriate for the current mode.
function cleanupDatePicker() {
    try {
        WinJS.Utilities.disposeSubTree(elementToBeRemoved);
        document.body.removeChild(elementToBeRemoved);
        elementToBeRemoved = null;
        DatePicker.getInformation = isWinRTEnabled()
            ? DatePicker._getInformationWinRT
            : DatePicker._getInformationJS;
    } catch (e) {
        LiveUnit.Assert.fail("cleanupDatePicker() failed: " + e);
    }
}
// Per-test event hit counters, reset by attachEventListeners.
// NOTE(review): only changeHit is incremented/asserted in the visible code.
var changeHit,
datechangeHit,
monthchangeHit,
yearchangeHit;
// Event name used when attaching/removing the shared change handler.
var changeType = "change";
// Log the event type together with the running change counter.
function logEventHits(evt) {
    LiveUnit.LoggingCore.logComment(evt.type + ": changeHit=" + changeHit);
}
// note: the change event only fires when the value is altered through the UI.
// Shared handler: counts hits, checks the event type, and logs.
var changeHandler = function (evt) {
    changeHit += 1;
    LiveUnit.Assert.areEqual(evt.type, changeType);
    logEventHits(evt);
};
// Reset all hit counters and hook the shared change handler onto the picker.
function attachEventListeners(picker) {
    changeHit = 0;
    datechangeHit = 0;
    monthchangeHit = 0;
    yearchangeHit = 0;
    picker.addEventListener(changeType, changeHandler, false);
}
// Detach the shared change handler and reset its counter.
function removeEventListeners(picker) {
    changeHit = 0;
    picker.removeEventListener(changeType, changeHandler);
}
// Dispatch a synthetic, bubbling, non-cancelable 'change' event on the
// given element, mimicking a user-driven selection.
function fireOnchange(targetElement) {
    var evt = document.createEvent('HTMLEvents');
    evt.initEvent('change', true, false);
    targetElement.dispatchEvent(evt);
}
// Expected UI strings / Gregorian backend values for the globalization checks.
var dateObjectUI, dataObjectBackEnd;
// Calendar systems exercised by the globalization tests.
// NOTE(review): name keeps the original spelling "Calenders" — it may be referenced elsewhere.
var supportedCalenders = ["GregorianCalendar", "HijriCalendar", "HebrewCalendar", "JapaneseCalendar", "KoreanCalendar", "ThaiCalendar", "TaiwanCalendar", "UmAlQuraCalendar", "JulianCalendar"];
// Inspect the first three children of the rendered picker and derive the
// visual field order (e.g. "MDY") from their class names.
function getActualUIOrder() {
    var root: any = document.getElementsByClassName('win-datepicker')[0];
    var datePos, monthPos, yearPos;
    for (var i = 0; i < 3; i++) {
        var cls = root.childNodes[i].className;
        if (cls.indexOf('picker-date') !== -1) {
            datePos = i;
        } else if (cls.indexOf('picker-month') !== -1) {
            monthPos = i;
        } else {
            // any other class is taken to be the year column
            yearPos = i;
        }
    }
    return getOrder(datePos, monthPos, yearPos);
}
// Map the three column positions to one of the six order tokens
// ("MYD", "MDY", "YMD", "YDM", "DYM", "DMY").
function getOrder(datePos, monthPos, yearPos) {
    // Month column leads.
    if (monthPos < yearPos && monthPos < datePos) {
        return (yearPos < datePos) ? "MYD" : "MDY";
    }
    // Year column leads.
    if (yearPos < monthPos && yearPos < datePos) {
        return (monthPos < datePos) ? "YMD" : "YDM";
    }
    // Otherwise the day column leads.
    return (yearPos < monthPos) ? "DYM" : "DMY";
}
// Ask the platform formatter which field order it would use for the given
// calendar and translate it into the same token format as getActualUIOrder.
function getExpectedOrder(calendar) {
    var dtf = Windows.Globalization.DateTimeFormatting;
    var template = "day month.full year";
    var base = new dtf.DateTimeFormatter(template);
    var formatter = new dtf.DateTimeFormatter(template, base.languages, base.geographicRegion, calendar, base.clock);
    var pattern = formatter.patterns[0];
    return getOrder(pattern.indexOf("day"), pattern.indexOf("month"), pattern.indexOf("year"));
}
// Strip leading '0' characters from a string, e.g. "007" -> "7".
// A string of only zeros collapses to "" (same as the original char-by-char loop).
// Replaces the hand-rolled two-loop implementation with the idiomatic
// anchored-regex form.
function removeLeadingZeros(val) {
    return val.replace(/^0+/, '');
}
// Globalization change handler: checks both the Gregorian backend values and
// the localized UI strings against the expectations captured by setBackEnd/setUI.
// UI strings are compared leniently: a mismatch is tolerated when it is only a
// leading-zero difference.
// NOTE(review): the UI assertions reuse the "backend date object" message text
// from the backend checks — looks like copy-paste; confirm intended wording.
function checkGlobValues(e) {
var d = new Date(datePicker.winControl.current);
var month = monthElement(datePicker);
var year = yearElement(datePicker);
var day = dateElement(datePicker);
var temp = currentCalendar || '';
LiveUnit.Assert.areEqual(dataObjectBackEnd.dateBackEnd, d.getDate(), "The backend date object has a wrong day value " + temp + " seed is " + getSeed() + " and number of Random Ticks is " + getCount());
LiveUnit.Assert.areEqual(dataObjectBackEnd.monthBackEnd, d.getMonth(), "The backend date object has a wrong month value " + temp + " seed is " + getSeed() + " and number of Random Ticks is " + getCount());
LiveUnit.Assert.areEqual(dataObjectBackEnd.yearBackEnd, d.getFullYear(), "The backend date object has a wrong year value " + temp + " seed is " + getSeed() + " and number of Random Ticks is " + getCount());
//Check the UI
if (dateObjectUI.dateUI !== day.value && dateObjectUI.dateUI !== removeLeadingZeros(day.value))
LiveUnit.Assert.areEqual(dateObjectUI.dateUI, day.value, "The backend date object has a wrong day value " + temp + " seed is " + getSeed() + " and number of Random Ticks is " + getCount());
if (dateObjectUI.monthUI !== month.value && dateObjectUI.monthUI !== removeLeadingZeros(month.value))
LiveUnit.Assert.areEqual(dateObjectUI.monthUI, month.value, "The backend date object has a wrong month value " + temp + " seed is " + getSeed() + " and number of Random Ticks is " + getCount());
if (dateObjectUI.yearUI !== year.value && dateObjectUI.yearUI !== removeLeadingZeros(year.value))
LiveUnit.Assert.areEqual(dateObjectUI.yearUI, year.value, "The backend date object has a wrong year value " + temp + " seed is " + getSeed() + " and number of Random Ticks is " + getCount());
}
// Subscribe checkGlobValues to the picker's change event and return an
// unsubscribe function.
// BUGFIX: as in addChangeEvent, the original cleanup called the nonexistent
// picker.removeListener(picker, checkGlobValues); DOM elements expose
// removeEventListener, which must receive the same event name and handler.
function addGlobChangeEvent(picker) {
    picker.addEventListener("change", checkGlobValues);
    return function () { picker.removeEventListener("change", checkGlobValues); };
}
// Record the Gregorian equivalent of the calendar's current date into
// dataObjectBackEnd (month converted to Date-style 0-based).
function setBackEnd(cal) {
    cal.changeCalendarSystem("GregorianCalendar");
    dataObjectBackEnd = {
        yearBackEnd: cal.year,
        monthBackEnd: cal.month - 1,
        dateBackEnd: cal.day
    };
}
// Capture into dateObjectUI the strings the picker should display for the
// given calendar system; the era string is appended to the year only when
// the platform's shortdate pattern includes an era field.
function setUI(myCalendar, calendarType) {
    myCalendar.changeCalendarSystem(calendarType);
    var dtf = Windows.Globalization.DateTimeFormatting;
    var template = "shortdate";
    var base = new dtf.DateTimeFormatter(template);
    var formatter = new dtf.DateTimeFormatter(template, base.languages, base.geographicRegion, calendarType, base.clock);
    var era = (formatter.patterns[0].indexOf("era") !== -1) ? (" " + myCalendar.eraAsString()) : '';
    dateObjectUI = {
        yearUI: myCalendar.yearAsString() + era,
        monthUI: myCalendar.monthAsString(),
        dateUI: myCalendar.dayAsString()
    };
}
// Bundle the picker's three <select> elements into one object.
function getControls(picker) {
    return {
        yearSelect: yearElement(picker),
        monthSelect: monthElement(picker),
        dateSelect: dateElement(picker)
    };
}
// Verify (WinRT only) that every option in the month <select> matches the
// localized month name produced by a Windows.Globalization.Calendar for the
// given calendar system. When a year is supplied the calendar is first walked
// to the selected month/day of that year so month names reflect that year
// (relevant for calendars whose month list varies by year).
// NOTE(review): the commented-out addYears lines suggest earlier attempts at
// the same positioning; the exact mutation order below appears deliberate —
// do not reorder without re-testing against WinRT.
function checkMonthNames(selectControls, calendarName, year?) {
var monthSelect = selectControls.monthSelect;
if (isWinRTEnabled()) {
var c = new glob.Calendar();
var myCalendar = new glob.Calendar(c.languages, calendarName, c.getClock());
if (year) {
year = parseInt(year);
//c.addYears(year - c.year);
myCalendar = new glob.Calendar(c.languages, calendarName, c.getClock());
myCalendar.addMonths(monthSelect.selectedIndex + 1 - myCalendar.month);
myCalendar.month = monthSelect.selectedIndex + 1;
myCalendar.addYears(year - myCalendar.year);
myCalendar.day = selectControls.dateSelect.selectedIndex + 1;
//myCalendar.addYears(year - myCalendar.year);
}
var totalNumOfMonths = myCalendar.numberOfMonthsInThisYear;
for (var i = 0; i < totalNumOfMonths; i++) {
// step the calendar to month i+1 of the current year, then compare names
myCalendar.addMonths(-1 * myCalendar.month + (i + 1));
LiveUnit.Assert.areEqual(myCalendar.monthAsString(), monthSelect[i].value, "Incorrect month name");
}
}
}
// True when the given Hijri year is a leap year: years 2, 5, 7, 10, 13, 16,
// 18, 21, 24, 26 and 29 of the 30-year cycle.
function hijriLeapYear(year) {
    var leapRemainders = [2, 5, 7, 10, 13, 16, 18, 21, 24, 26, 29];
    return leapRemainders.indexOf(parseInt(year) % 30) !== -1;
}
// True when the given Hebrew year is a leap year (13 months).
// Leap years are years 3, 6, 8, 11, 14, 17 and 19 of the 19-year Metonic
// cycle. Year 19 of the cycle satisfies year % 19 === 0.
// BUGFIX: the original tested r === 19, which is unreachable for
// r = year % 19, so every 19th leap year was misreported as non-leap.
function isHebrewLeapYear(year) {
    var r = parseInt(year) % 19;
    return (r === 3 || r === 6 || r === 8 || r === 11 || r === 14 || r === 17 || r === 0);
}
// Validate the reported day count for month (1-based) of a Hebrew-style year.
// Months with a known fixed or leap-dependent length are checked exactly;
// any other month must still contain 29 or 30 days.
// Collapses the original's duplicated per-month if/else chains into grouped
// cases: {6,8,10,12} have 30 days only in leap years, {7,9,11} have 29 days
// only in leap years.
// BUGFIX: the original fallthrough branch called
// LiveUnit.Assert.areEqual(numOfDays, "incorrect number of days in month " + month)
// — passing the message string as the expected value instead of failing with it.
function validateMonth(month, isLeap, numOfDays) {
    var val = -1;
    switch (month) {
        case 1:
        case 5:
            val = 30;
            break;
        case 4:
        case 13:
            val = 29;
            break;
        case 6:
        case 8:
        case 10:
        case 12:
            val = isLeap ? 30 : 29;
            break;
        case 7:
        case 9:
        case 11:
            val = isLeap ? 29 : 30;
            break;
    }
    if (val !== -1) {
        LiveUnit.Assert.areEqual(val, numOfDays, "incorrect number of days in month " + month);
    } else if (numOfDays !== 29 && numOfDays !== 30) {
        LiveUnit.Assert.fail("incorrect number of days in month " + month);
    }
}
// Largest numeric option value in the day <select>; -1 for an empty control.
// Non-numeric values (parseInt -> NaN) never win a comparison and are skipped.
function getMaxDay(selectControl) {
    var best = -1;
    for (var i = 0; i < selectControl.length; i++) {
        var candidate = parseInt(selectControl[i].value);
        if (candidate > best) {
            best = candidate;
        }
    }
    return best;
}
// Gregorian leap-year rule: divisible by 4, except centuries,
// except centuries divisible by 400.
function isGregorianLeapYear(year) {
    var y = parseInt(year);
    if (y % 400 === 0) return true;
    if (y % 100 === 0) return false;
    return y % 4 === 0;
}
// Korean (Dangi) year -> Gregorian year: the two calendars differ by a
// constant 2333 years (4334 corresponds to 2001).
function convertFromKoreanToGreogrianYear(year) {
    var offset = 4334 - 2001;
    return year - offset;
}
// Extract the first contiguous run of ASCII digits from a year string
// (e.g. an era-decorated year label); '' when it contains no digits.
function numInYear(year) {
    var match = year.match(/[0-9]+/);
    return match ? match[0] : '';
}
// Thai Buddhist year -> Gregorian year: constant offset of 543
// (2544 corresponds to 2001).
function convertFromThaiToGreogrianYear(year) {
    var offset = 2544 - 2001;
    return year - offset;
}
// Julian calendar leap rule: every fourth year, no century exceptions.
function isJulianLeapYear(year) {
    return year % 4 === 0;
}
// Minguo (Taiwan) year -> Gregorian year: year 90 corresponds to 2001,
// so the constant offset is 1911.
function convertFromTaiwanToGreogrianYear(year) {
    var offset = 2001 - 90;
    return year + offset;
}
// Stub until replaced with the ABI: fixed random seed reported in
// globalization assertion messages.
function getSeed() {
    return 100;
}
// Stub until replaced with the ABI: fixed random-tick count reported in
// globalization assertion messages.
function getCount() {
    return 2;
}
// Calendar name included in globalization assertion messages (see checkGlobValues).
var currentCalendar;
// Like verifyDate but compares the raw <select> string values (no numeric
// coercion); absent 'day'/'year' parts are expected to have no DOM value.
// NOTE(review): as in verifyDate, 'month' has no absence branch — confirm intended.
function verifyDatePickerContent(picker, date) {
if ('day' in date) {
LiveUnit.Assert.areEqual(date.day.toString(), dateElement(picker).value);
} else {
LiveUnit.Assert.areEqual(null, dateElement(picker).value);
}
if ('month' in date) {
LiveUnit.Assert.areEqual(date.month.toString(), monthElement(picker).value);
}
if ('year' in date) {
LiveUnit.Assert.areEqual(date.year.toString(), yearElement(picker).value);
} else {
LiveUnit.Assert.areEqual(null, yearElement(picker).value);
}
}
// Number of getIndex calls (inspected by tests elsewhere in the file).
var numofCalls;
// Stub "information" provider used in place of the WinRT-backed one: it models
// a synthetic calendar whose year has six named months, each spanning roughly
// two Gregorian months (59-62 days), over the year range [startDate, endDate].
var getInformationJS = function (startDate, endDate) {
    var minYear = startDate.getFullYear();
    var maxYear = endDate.getFullYear();
    var yearSource = {
        getLength: function () { return Math.max(0, maxYear - minYear + 1); },
        getValue: function (index) { return minYear + index; }
    };
    var months = ["firstMonth", "secondMonth", "thirdMonth", "fourthMonth", "fifthMonth", "sixthMonth"];
    var monthSource = function (yearIndex) {
        return {
            getLength: function () { return months.length; },
            getValue: function (index) { return months[index]; },
            getMonthNumber: function (index) { return Math.min(index, months.length - 1); }
        };
    };
    var dateSource = function (yearIndex, monthIndex) {
        var year = yearSource.getValue(yearIndex);
        var maxValue = 0;
        switch (monthIndex) {
            case 1:
            case 2:
            case 4:
            case 5:
                maxValue = 61;
                break;
            case 0:
                // the first synthetic month covers Jan+Feb, so leap years matter
                if (year % 4 === 0) {
                    maxValue = 60;
                }
                else {
                    maxValue = 59;
                }
                break;
            case 3:
                maxValue = 62;
                break;
        }
        return {
            getLength: function () { return maxValue; },
            getValue: function (index) { return "" + (index + 1); },
            getDateNumber: function (index) { return Math.min(index + 1, maxValue); }
        };
    };
    return {
        order: ["date", "year", "month"],
        getDate: function (index) {
            var year = yearSource.getValue(index.year);
            var month = monthSource(index.year).getMonthNumber(index.month) * 2;
            var numOfDaysInPreviousMonth = (new Date(year, month + 1, 0)).getDate();
            month += ((index.date > (new Date(year, month + 1, 0)).getDate()) ? 1 : 0);
            var day = (index.date + 1 > numOfDaysInPreviousMonth) ? index.date + 1 - numOfDaysInPreviousMonth : index.date + 1;
            day = Math.min(day, (new Date(year, month + 1, 0).getDate()));
            return new Date(year, month, day);
        },
        getIndex: function (date) {
            numofCalls++;
            var yearIndex = 0;
            var year = date.getFullYear();
            if (year < minYear) {
                yearIndex = 0;
            }
            // BUGFIX: was `year > this.maxYear` — the returned object has no
            // maxYear property, so the comparison was always false and
            // out-of-range years fell through to the unclamped else branch;
            // compare against the captured closure variable instead.
            else if (year > maxYear) {
                yearIndex = yearSource.getLength() - 1;
            }
            else {
                yearIndex = date.getFullYear() - minYear;
            }
            var monthIndex = Math.min(date.getMonth() / 2, monthSource(yearIndex).getLength()) | 0;
            var dateIndex;
            if (date.getMonth() % 2 === 0) {
                dateIndex = Math.min(date.getDate(), dateSource(yearIndex, monthIndex).getLength()) - 1;
            }
            else {
                // odd Gregorian month: offset the day by the length of the month before it
                var dateValue = date.getDate() + (new Date(year, date.getMonth(), 0, 12)).getDate();
                dateIndex = Math.min(dateValue, dateSource(yearIndex, monthIndex).getLength()) - 1;
            }
            return {
                year: yearIndex,
                month: monthIndex,
                date: dateIndex
            };
        },
        years: yearSource,
        months: monthSource,
        dates: dateSource
    };
};
// Snapshot of a control's layout box. Height is recorded for possible
// future snap-view shape checks.
var ControlOrder = function (left, top, width, height) {
    this.startX = left;
    this.startY = top;
    this.width = width;
    this.height = height;
};
// Assert that adjacent controls on the same row are separated by exactly
// 20px; controls on different rows (snap view) are skipped.
function checkCorrectCSS(controlsPosition) {
    for (var i = 0; i + 1 < controlsPosition.length; i++) {
        var left = controlsPosition[i];
        var right = controlsPosition[i + 1];
        if (left.startY === right.startY) {
            LiveUnit.Assert.areEqual(left.startX + left.width + 20, right.startX, "");
        }
    }
}
// For the stub six-month calendar (see getInformationJS): true when the day
// <select> contains exactly the expected number of options for the selected
// synthetic month (month 0 is leap-dependent; months outside 0-5 yield
// undefined and therefore false).
function checkDayCount(controls) {
    var year = parseInt(controls.yearSelect.value);
    var month = controls.monthSelect.selectedIndex;
    var maxValue;
    if (month === 0) {
        maxValue = (year % 4 === 0) ? 60 : 59;
    } else if (month === 3) {
        maxValue = 62;
    } else if (month === 1 || month === 2 || month === 4 || month === 5) {
        maxValue = 61;
    }
    return maxValue === controls.dateSelect.length;
}
export class DatePickerDecl {
// Disabled (x-prefix): seed the picker with 1 Jan 2012 and verify the
// rendered year string and the 0-based month/day selections.
// Removed the unused local `var cleanup;` (never assigned or used here).
xtestKnownDayWithIndependentStateofSamoa = function (complete) {
    createPickerWithAppend({
        current: new Date(2012, 0, 1),
        calendar: 'GregorianCalendar'
    }).then(function (picker) {
        datePicker = picker;
        var selectControls = getControls(picker);
        var year = numInYear(selectControls.yearSelect.value);
        LiveUnit.Assert.areEqual("2012", year, "Error in known year in Independent State of Samoa");
        LiveUnit.Assert.areEqual(0, selectControls.monthSelect.selectedIndex, "Error in known month in Independent State of Samoa");
        LiveUnit.Assert.areEqual(0, selectControls.dateSelect.selectedIndex, "Error in known day in Independent State of Samoa");
    })
    .then(null, unhandledTestError)
    .then(cleanupDatePicker)
    .then(complete, unhandledTestError);
};
// Drive the picker to 31 Dec 2015 via the individual selects and verify both
// the change-event bookkeeping (checkValues) and the rendered date.
testParameterOfChangeEvent = function (complete) {
var cleanup;
createPickerWithAppend({ calendar: 'GregorianCalendar' }).then(function (picker) {
datePicker = picker;
cleanup = addChangeEvent(picker);
//testing 31st of December 2015
setValues();
setMonth(picker, 12);
setDate(picker, 31);
setYear(picker, 2015);
verifyDate(picker, { day: 31, month: 12, year: 2015 });
})
.then(null, unhandledTestError)
.then(cleanup) //placed after the error handler to make sure it gets removed even if the test case failed
.then(cleanupDatePicker)
.then(complete, unhandledTestError);
};
// WinRT only: with maxYear 2000 and minYear 0, the year list should span
// "1" (year 0 is invalid in the Gregorian calendar) through "2000".
testMaxAndMinYearInGregorian = function (complete) {
//BugID: 628192
if (isWinRTEnabled()) {
createPickerWithAppend({
calendar: 'GregorianCalendar',
maxYear: 2000,
minYear: 0
}).
then(function (picker) {
datePicker = picker;
var selectControls = getControls(picker);
LiveUnit.Assert.areEqual("1", selectControls.yearSelect[0].value, "Incorrect min date");
LiveUnit.Assert.areEqual("2000", selectControls.yearSelect[selectControls.yearSelect.length - 1].value, "Incorrect max date");
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, unhandledTestError);
}
else
complete();
};
// WinRT only: select Jan 2012 then pick the last day option directly
// (bypassing setDate) and verify it renders as 31 Jan 2012.
testSpecialDayWithIndependentStateofSamoa = function (complete) {
var cleanup;
if (isWinRTEnabled()) {
createPickerWithAppend({ calendar: 'GregorianCalendar' }).then(function (picker) {
datePicker = picker;
cleanup = addChangeEvent(picker);
//testing 31st of January 2012
setValues();
yearBackEnd = 2012;
setYear(picker, 2012);
monthBackEnd = 1;
setMonth(picker, 1);
var selectDayElement = dateElement(picker);
dateBackEnd = 31;
selectDayElement.selectedIndex = selectDayElement.length - 1;
fireOnchange(selectDayElement);
verifyDate(picker, { day: 31, month: 1, year: 2012 });
})
.then(null, unhandledTestError)
.then(cleanup) //placed after the error handler to make sure it gets removed even if the test case failed
.then(cleanupDatePicker)
.then(complete, unhandledTestError);
}
else {
complete();
}
};
// WinRT only: variant of testMaxAndMinYearInGregorian with a small range
// (minYear 0 -> list starts at "1", maxYear 50).
testMaxAndMinYearInGregorian_temp = function (complete) {
if (isWinRTEnabled()) {
createPickerWithAppend({
calendar: 'GregorianCalendar',
maxYear: 50,
minYear: 0
}).then(function (picker) {
datePicker = picker;
var selectControls = getControls(picker);
LiveUnit.Assert.areEqual("1", selectControls.yearSelect[0].value, "Incorrect min date");
LiveUnit.Assert.areEqual("50", selectControls.yearSelect[selectControls.yearSelect.length - 1].value, "Incorrect max date");
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, unhandledTestError);
}
else
complete();
};
// From 31 Dec 2015, switching to February (non-leap year) must clamp the
// day down to 28.
testChangeEventParameterInNonLeapYear = function (complete) {
// bug #436665
var cleanup;
createPickerWithAppend({ calendar: 'GregorianCalendar' }).then(function (picker) {
datePicker = picker;
cleanup = addChangeEvent(picker);
setValues();
setMonth(picker, 12);
setDate(picker, 31);
setYear(picker, 2015);
verifyDate(picker, { day: 31, month: 12, year: 2015 });
//testing 28th of Feb 2015
setMonth(picker, 2);
verifyDate(picker, { day: 28, month: 2, year: 2015 });
})
.then(null, unhandledTestError)
.then(cleanup) //placed after the error handler to make sure it gets removed even if the test case failed
.then(cleanupDatePicker)
.then(complete, unhandledTestError);
};
// From 31 Dec 2015, moving to Feb 2016 (leap year) must clamp the day to 29.
testLeapYearParameter = function (complete) {
// bug #436665
var cleanup;
createPickerWithAppend({ calendar: 'GregorianCalendar' }).then(function (picker) {
datePicker = picker;
cleanup = addChangeEvent(picker);
setValues();
setMonth(picker, 12);
setDate(picker, 31);
setYear(picker, 2015);
verifyDate(picker, { day: 31, month: 12, year: 2015 });
//testing 28th of Feb 2016
setYear(picker, 2016);
setMonth(picker, 2);
verifyDate(picker, { day: 29, month: 2, year: 2016 });
})
.then(null, unhandledTestError)
.then(cleanup)
.then(cleanupDatePicker) //placed after the error handler to make sure it gets removed even if the test case failed
.then(complete, unhandledTestError);
};
// Changing month from Feb to Mar keeps the day (28 fits in both months).
testMonthChange = function (complete) {
var cleanup;
createPickerWithAppend({ calendar: 'GregorianCalendar' }).then(function (picker) {
datePicker = picker;
cleanup = addChangeEvent(picker);
setValues();
setMonth(picker, 2);
setDate(picker, 28);
setYear(picker, 2015);
verifyDate(picker, { day: 28, month: 2, year: 2015 });
//testing 28th of Feb 2016
setMonth(picker, 3);
verifyDate(picker, { day: 28, month: 3, year: 2015 });
})
.then(null, unhandledTestError)
.then(cleanup)
.then(cleanupDatePicker) //placed after the error handler to make sure it gets removed even if the test case failed
.then(complete, unhandledTestError);
};
// A freshly created picker should display today's date.
testSimpleDate = function (complete) {
createPickerWithAppend({ calendar: 'GregorianCalendar' }).then(function (picker) {
var today = new Date();
verifyDate(picker, {
day: today.getDate(),
month: today.getMonth() + 1,
year: today.getFullYear()
});
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
};
// Control defaults: enabled, all three selects visible, and the year range
// spanning current year +/- 100.
testDefaults = function (complete) {
// validate datePicker defaults
createPickerWithAppend({ calendar: 'GregorianCalendar' }).then(function (picker) {
var c = picker.winControl;
LiveUnit.Assert.isFalse(c.disabled);
// verify all 3 elements are displayed, style display=""
LiveUnit.Assert.areEqual("", monthElement(picker).style.display);
LiveUnit.Assert.areEqual("", dateElement(picker).style.display);
LiveUnit.Assert.areEqual("", yearElement(picker).style.display);
var year = new Date().getFullYear();
var min = year - 100;
var max = year + 100;
LiveUnit.Assert.areEqual(min, c.minYear);
LiveUnit.Assert.areEqual(max, c.maxYear);
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
};
// 7 Apr 1978 should render as full month name plus plain day/year digits;
// WinRT prefixes day/year text with a direction marker that is stripped
// before comparing.
testDefaultFormats = function (complete) {
// validate datePicker default format
createPickerWithAppend({
current: new Date(1978, 3, 7),
calendar: 'GregorianCalendar'
}).then(function (picker) {
LiveUnit.Assert.areEqual("April", getText(monthElement(picker)));
if (isWinRTEnabled()) {
// .substring(1) strips the direction marker off the text
//
LiveUnit.Assert.areEqual("7", getText(dateElement(picker)).substring(1));
LiveUnit.Assert.areEqual("1978", getText(yearElement(picker)).substring(1));
}
else {
LiveUnit.Assert.areEqual("7", getText(dateElement(picker)));
LiveUnit.Assert.areEqual("1978", getText(yearElement(picker)));
}
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
};
// Assigning winControl.current from a Date updates the UI; the time-of-day
// portion is irrelevant to the displayed date.
testSetCurrentFromDate = function (complete) {
createPickerWithAppend({ calendar: 'GregorianCalendar' }).then(function (picker) {
var date = new Date(2011, 1, 3, 10, 11, 12);
picker.winControl.current = date;
verifyDate(picker, { day: 3, month: 2, year: 2011 });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
};
// Create the control disabled, verify its date still renders, then re-enable.
testDisabled1 = function (complete) {
// create initial control in disabled state
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: new Date(1972, 4, 1),
disabled: true
}).then(function (picker) {
verifyDate(picker, { day: 1, month: 5, year: 1972 });
LiveUnit.Assert.isTrue(picker.winControl.disabled);
picker.winControl.disabled = false;
LiveUnit.Assert.isFalse(picker.winControl.disabled);
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
};
// Create the control enabled, then toggle it to disabled.
testDisabled2 = function (complete) {
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: new Date(1968, 2, 3),
disabled: false
}).then(function (picker) {
verifyDate(picker, { day: 3, month: 3, year: 1968 });
LiveUnit.Assert.isFalse(picker.winControl.disabled);
picker.winControl.disabled = true;
LiveUnit.Assert.isTrue(picker.winControl.disabled);
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
};
// An explicit initial date (3 Feb 2005) is rendered as provided.
testCustomDate = function (complete) {
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: new Date(2005, 1, 3)
}).then(function (picker) {
verifyDate(picker, { day: 3, month: 2, year: 2005 });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
};
// 29 Feb 2000 is a valid leap day (2000 is divisible by 400) and must render as-is.
testLeapYear = function (complete) {
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: new Date(2000, 1, 29)
}).then(function (picker) {
verifyDate(picker, { day: 29, month: 2, year: 2000 });
})
// handle any errors encountered in createPickerWithAppend() through the unhandledTestError
// function. Note the first parameter is null because we want to call the harness
// provided complete function if error or not. This is handled in the next .then()
// statement.
.then(null, unhandledTestError)
.then(cleanupDatePicker)
// call the harness provided complete function when done. Note complete is called
// when actually complete (no errors) or if there was an error in the previous
// then() statement's error handler.
.then(complete, complete);
};
// new Date(2001, 1, 29) rolls over to 1 Mar 2001 (2001 is not a leap year),
// and the picker must show the rolled-over date.
testNonLeapYear = function (complete) {
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: new Date(2001, 1, 29)
}).then(function (picker) {
verifyDate(picker, { day: 1, month: 3, year: 2001 });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
};
// adjust current year to minYear if current year < minYear
// (day and month are preserved; only the year is clamped up).
testMinYear = function (complete) {
//BugID: 450489 - closed by design
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: new Date(1989, 3, 20),
minYear: 2000
}).then(function (picker) {
verifyDate(picker, { day: 20, month: 4, year: 2000 });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
};
// A current year above maxYear is clamped down to maxYear, keeping day/month.
testMaxYear = function (complete) {
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: new Date(2035, 11, 29),
maxYear: 2011
}).then(function (picker) {
verifyDate(picker, { day: 29, month: 12, year: 2011 });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
};
// A far-future current date (2 Jan 2121) with maxYear 2011 clamps the year
// down to 2011 while preserving day and month.
// Removed the unused local `var today = new Date();` (never referenced).
testMaxYear2 = function (complete) {
    createPickerWithAppend({
        calendar: 'GregorianCalendar',
        maxYear: 2011,
        current: new Date(2121, 0, 2)
    }).then(function (picker) {
        verifyDate(picker, { day: 2, month: 1, year: 2011 });
    })
    .then(null, unhandledTestError)
    .then(cleanupDatePicker)
    .then(complete, complete);
};
// verify year > range snaps to maxYear
testYearRange1 = function (complete) {
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: new Date(2021, 11, 1),
maxYear: 2001,
minYear: 2000
}).then(function (picker) {
verifyDate(picker, { day: 1, month: 12, year: 2001 });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
};
// verify year < range snaps to min year
testYearRange1_second = function (complete) {
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: new Date(1921, 11, 2),
maxYear: 2001,
minYear: 2000
}).then(function (picker) {
verifyDate(picker, { day: 2, month: 12, year: 2000 });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
};
// verify if minyear > maxyear, maxyear == minyear
// (maxYear listed before minYear: the later-set minYear wins, so year snaps to 2000)
testYearRange2 = function (complete) {
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: new Date(1972, 11, 3),
maxYear: 1995,
minYear: 2000
}).then(function (picker) {
verifyDate(picker, { day: 3, month: 12, year: 2000 });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
};
// verify if minyear > maxyear, minyear == maxyear, different attribute order
// (minYear listed before maxYear: the later-set maxYear wins, so year snaps to 1995)
testYearRange3 = function (complete) {
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: new Date(1972, 11, 3),
minYear: 2000,
maxYear: 1995
}).then(function (picker) {
verifyDate(picker, { day: 3, month: 12, year: 1995 });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
};
// verify when minyear == maxyear, year snaps to minyear
testYearRange4 = function (complete) {
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: new Date(1935, 11, 4),
maxYear: 1995,
minYear: 1995
}).then(function (picker) {
verifyDate(picker, { day: 4, month: 12, year: 1995 });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
};
// verify invalid input, minyear && maxyear are ignored
// (1942 lies within [0, 1995], so the current date is kept unchanged)
testYearRange5 = function (complete) {
//BugID: 628192
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: new Date(1942, 11, 5),
maxYear: 1995,
minYear: 0 // UNDONE: -1995 (1996 BC fails in WinRT mode)
}).then(function (picker) {
verifyDate(picker, { day: 5, month: 12, year: 1942 });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
};
// verify invalid input, minyear && maxyear are ignored
// (variant of testYearRange5 with minYear 1910; 1942 is still inside the range)
testYearRange5_temp = function (complete) {
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: new Date(1942, 11, 5),
maxYear: 1995,
minYear: 1910 // UNDONE: -1995 (1996 BC fails in WinRT mode)
}).then(function (picker) {
verifyDate(picker, { day: 5, month: 12, year: 1942 });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
};
// verify min/max adjustment sequence of squirrely-ness:
// setting min above max (or max below min) drags the other bound along with
// it, and the current date re-clamps after each range change.
testYearRange6 = function (complete) {
createPickerWithAppend({
calendar: 'GregorianCalendar',
minYear: 2010,
maxYear: 2020,
current: new Date(2011, 11, 5)
}).then(function (picker) {
verifyDate(picker, { day: 5, month: 12, year: 2011 });
var c = picker.winControl;
c.minYear = 2030; // min > previous max
LiveUnit.Assert.areEqual(2030, c.minYear);
LiveUnit.Assert.areEqual(2030, c.maxYear);
c.maxYear = 2130;
LiveUnit.Assert.areEqual(2030, c.minYear);
LiveUnit.Assert.areEqual(2130, c.maxYear);
c.current = new Date(2100, 6, 6);
verifyDate(picker, { day: 6, month: 7, year: 2100 });
c.maxYear = 1980; // max < previous min so min == max == 1980
LiveUnit.Assert.areEqual(1980, c.minYear);
LiveUnit.Assert.areEqual(1980, c.maxYear);
c.minYear = 2000; // min > max so max == min == 2000
LiveUnit.Assert.areEqual(2000, c.minYear);
LiveUnit.Assert.areEqual(2000, c.maxYear);
c.current = new Date(1990, 1, 3);
verifyDate(picker, { day: 3, month: 2, year: 2000 });
c.maxYear = 2100;
LiveUnit.Assert.areEqual(2100, c.maxYear);
LiveUnit.Assert.areEqual(2000, c.minYear);
c.current = new Date(2050, 2, 5);
verifyDate(picker, { day: 5, month: 3, year: 2050 });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
};
// verify min year only, year snaps to minyear
testYearRange7 = function (complete) {
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: new Date(1990, 1, 1),
minYear: 2000
}).then(function (picker) {
verifyDate(picker, { day: 1, month: 2, year: 2000 });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
};
// verify max year only, year doesn't go beyond maxyear
testYearRange8 = function (complete) {
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: new Date(2100, 1, 1),
maxYear: 2000
}).then(function (picker) {
verifyDate(picker, { day: 1, month: 2, year: 2000 });
var c = picker.winControl;
// verify dates < max year still OK
c.current = new Date(1990, 1, 3);
verifyDate(picker, { day: 3, month: 2, year: 1990 });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
};
// Disabled (x-prefix): day format strings 'd' (no pad), 'dd' (zero-pad) and
// '' (day column hidden).
// NOTE(review): the three createPickerWithAppend chains are not chained to
// each other, so they race and only the last one gates complete() — confirm
// before re-enabling.
xtestDayFormatting = function (complete) {
// bug: win8TFS:245862 - consume real WinJS.Glob formatting not yet implemented
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: new Date(2012, 11, 1),
dayFormat: 'd'
}).then(function (picker) {
verifyDate(picker, { day: '1', month: 12, year: 2012 });
}).then(null, unhandledTestError)
.then(cleanupDatePicker);
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: new Date(2012, 11, 1),
dayFormat: 'dd'
}).then(function (picker) {
verifyDate(picker, { day: '01', month: 12, year: 2012 });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker);
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: new Date(2012, 11, 1),
dayFormat: ''
}).then(function (picker) {
verifyDate(picker, { month: 12, year: 2012 });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
};
// Disabled (x-prefix): month format strings 'M', 'MM', 'MMMM' (full name)
// and '' (month column hidden).
// NOTE(review): like xtestDayFormatting, the four chains are unchained and
// race; only the last gates complete() — confirm before re-enabling.
xtestMonthFormatting = function (complete) {
// bug: win8TFS:245862 - consume real WinJS.Glob formatting not yet implemented
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: new Date(2012, 2, 12),
monthFormat: 'M'
}).then(function (picker) {
verifyDate(picker, { day: 12, month: '3', year: 2012 });
}).then(null, unhandledTestError)
.then(cleanupDatePicker);
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: new Date(2012, 2, 12),
monthFormat: 'MM'
}).then(function (picker) {
verifyDate(picker, { day: 12, month: '03', year: 2012 });
}).then(null, unhandledTestError)
.then(cleanupDatePicker);
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: new Date(2012, 2, 12),
monthFormat: 'MMMM'
}).then(function (picker) {
verifyDate(picker, { day: 12, month: 'March', year: 2012 });
}).then(null, unhandledTestError)
.then(cleanupDatePicker);
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: new Date(2012, 2, 12),
monthFormat: ''
}).then(function (picker) {
verifyDate(picker, { day: 12, year: 2012 });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
};
xtestYearFormatting = function (complete) {
    // bug: win8TFS:245862 - consume real WinJS.Glob formatting not yet implemented
    // Verifies the yearFormat property: 'yy' two-digit year, 'yyyy'
    // four-digit year, and '' hides the year field entirely.
    //
    // NOTE(review): the original started all three pickers in parallel, so
    // the cleanupDatePicker calls raced each other and only the last chain
    // reported errors or called complete. The scenarios are now chained so
    // each picker is created only after the previous one is cleaned up.
    createPickerWithAppend({
        calendar: 'GregorianCalendar',
        current: new Date(2023, 11, 1),
        yearFormat: 'yy'
    }).then(function (picker) {
        verifyDate(picker, { day: 1, month: 12, year: '23' });
    })
    .then(cleanupDatePicker)
    .then(function () {
        return createPickerWithAppend({
            calendar: 'GregorianCalendar',
            current: new Date(2056, 11, 4),
            yearFormat: 'yyyy'
        });
    })
    .then(function (picker) {
        verifyDate(picker, { day: '04', month: 12, year: '2056' });
    })
    .then(cleanupDatePicker)
    .then(function () {
        return createPickerWithAppend({
            calendar: 'GregorianCalendar',
            current: new Date(2089, 11, 7),
            yearFormat: ''
        });
    })
    .then(function (picker) {
        verifyDate(picker, { day: 7, month: 12 });
    })
    .then(null, unhandledTestError)
    .then(cleanupDatePicker)
    .then(complete, complete);
};
testFireMonthchangeEvent = function (complete) {
    // A change event dispatched on the month <select> must reach the
    // attached change listener exactly once.
    var settings = {
        calendar: 'GregorianCalendar',
        current: new Date(2011, 3, 1)
    };
    createPickerWithAppend(settings)
        .then(function (picker) {
            attachEventListeners(picker);
            fireOnchange(monthElement(picker));
            LiveUnit.Assert.areEqual(1, changeHit);
        })
        .then(null, unhandledTestError)
        .then(cleanupDatePicker)
        .then(complete, complete);
};
testFireDatechangeEvent = function (complete) {
    // A change event dispatched on the date <select> must reach the
    // attached change listener exactly once.
    var settings = {
        calendar: 'GregorianCalendar',
        current: new Date(2011, 3, 1)
    };
    createPickerWithAppend(settings)
        .then(function (picker) {
            attachEventListeners(picker);
            fireOnchange(dateElement(picker));
            LiveUnit.Assert.areEqual(1, changeHit);
        })
        .then(null, unhandledTestError)
        .then(cleanupDatePicker)
        .then(complete, complete);
};
testFireYearchangeEvent = function (complete) {
    // A change event dispatched on the year <select> must reach the
    // attached change listener exactly once.
    var settings = {
        calendar: 'GregorianCalendar',
        current: new Date(2011, 3, 1)
    };
    createPickerWithAppend(settings)
        .then(function (picker) {
            attachEventListeners(picker);
            fireOnchange(yearElement(picker));
            LiveUnit.Assert.areEqual(1, changeHit);
        })
        .then(null, unhandledTestError)
        .then(cleanupDatePicker)
        .then(complete, complete);
};
testFireAllEventsAndRemove = function (complete) {
// Fires a change event on each of the three select fields (month, date,
// year) with listeners attached and expects 3 notifications; then removes
// the listeners, fires again, and expects the counter to read 0 with the
// displayed date unchanged throughout.
// NOTE(review): asserting changeHit === 0 after the earlier assert of 3
// implies the listener-removal path resets the shared changeHit counter --
// confirm against removeEventListeners' definition elsewhere in the file.
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: new Date(2011, 3, 1)
}).then(function (picker) {
attachEventListeners(picker);
verifyDate(picker, { day: '01', month: '04', year: '2011' });
fireOnchange(monthElement(picker));
verifyDate(picker, { day: '01', month: '04', year: '2011' });
fireOnchange(dateElement(picker));
verifyDate(picker, { day: '01', month: '04', year: '2011' });
fireOnchange(yearElement(picker));
verifyDate(picker, { day: '01', month: '04', year: '2011' });
LiveUnit.Assert.areEqual(3, changeHit);
// remove event listeners, verify events not fired
removeEventListeners(picker);
fireOnchange(monthElement(picker));
fireOnchange(dateElement(picker));
fireOnchange(yearElement(picker));
LiveUnit.Assert.areEqual(0, changeHit);
// make sure date hasn't changed
verifyDate(picker, { day: '01', month: '04', year: '2011' });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
};
testFireMultipleChangeEvents = function (complete) {
    // Repeatedly fires change events on all three fields and confirms the
    // shared counter tracks every dispatch: 3 per iteration, 15 iterations.
    var settings = {
        calendar: 'GregorianCalendar',
        current: new Date(2011, 3, 1)
    };
    createPickerWithAppend(settings)
        .then(function (picker) {
            attachEventListeners(picker);
            var iteration = 1;
            while (iteration <= 15) {
                fireOnchange(monthElement(picker));
                fireOnchange(dateElement(picker));
                fireOnchange(yearElement(picker));
                LiveUnit.Assert.areEqual(iteration * 3, changeHit);
                iteration++;
            }
        })
        .then(null, unhandledTestError)
        .then(cleanupDatePicker)
        .then(complete, complete);
};
testSettingDateObject = function (complete) {
// Assigning a JavaScript Date object to winControl.current must update the
// rendered day/month/year fields.
// BUG: win8TFS: 245862 - consume real WinJS.Glob
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: new Date(2011, 3, 1)
}).then(function (picker) {
// note: javascript Date object has 0 based months (0 == January),
// so Feb 03, 2011 == new Date(2011, 1, 3)
var date = new Date(2011, 1, 3);
picker.winControl.current = date;
verifyDate(picker, { day: '03', month: '02', year: '2011' });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
};
testdatechangeEvent = function (complete) {
    // Programmatically assigning 'current' updates the displayed day but
    // must NOT raise a change event (changeHit stays 0).
    var settings = {
        calendar: 'GregorianCalendar',
        current: new Date(2011, 3, 1)
    };
    createPickerWithAppend(settings)
        .then(function (picker) {
            verifyDate(picker, { day: '01', month: '04', year: '2011' });
            attachEventListeners(picker);
            // change the day
            picker.winControl.current = 'April 02, 2011';
            verifyDate(picker, { day: '02', month: '04', year: '2011' });
            LiveUnit.Assert.areEqual(0, changeHit);
        })
        .then(null, unhandledTestError)
        .then(cleanupDatePicker)
        .then(complete, complete);
};
testmonthchangeEvent = function (complete) {
    // Programmatically assigning 'current' updates the displayed month but
    // must NOT raise a change event (changeHit stays 0).
    var settings = {
        calendar: 'GregorianCalendar',
        current: new Date(2011, 3, 1)
    };
    createPickerWithAppend(settings)
        .then(function (picker) {
            verifyDate(picker, { day: '01', month: '04', year: '2011' });
            attachEventListeners(picker);
            // change the month
            picker.winControl.current = 'May 01, 2011';
            verifyDate(picker, { day: '01', month: '05', year: '2011' });
            LiveUnit.Assert.areEqual(0, changeHit);
        })
        .then(null, unhandledTestError)
        .then(cleanupDatePicker)
        .then(complete, complete);
};
testyearchangeEvent = function (complete) {
    // Programmatically assigning 'current' updates the displayed year but
    // must NOT raise a change event (changeHit stays 0).
    // BUG: 266243 datePicker needs to use Date.getDate() to compare dates instead of Date.getDay() - day of week
    var settings = {
        calendar: 'GregorianCalendar',
        current: new Date(2011, 3, 1)
    };
    createPickerWithAppend(settings)
        .then(function (picker) {
            verifyDate(picker, { day: '01', month: '04', year: '2011' });
            attachEventListeners(picker);
            // change the year
            picker.winControl.current = 'April 01, 2012';
            verifyDate(picker, { day: '01', month: '04', year: '2012' });
            LiveUnit.Assert.areEqual(0, changeHit);
        })
        .then(null, unhandledTestError)
        .then(cleanupDatePicker)
        .then(complete, complete);
};
testCurrentAttribute = function (complete) {
    // Verifies that a date supplied through the 'current' construction
    // option is reflected in the rendered day/month/year fields.
    // (Removed an unused 'current' local that the original read but never
    // asserted on.)
    createPickerWithAppend({
        calendar: 'GregorianCalendar',
        current: new Date(2011, 0, 1)
    })
        .then(function (picker) {
            verifyDate(picker, { day: '01', month: '01', year: '2011' });
        })
        .then(null, unhandledTestError)
        .then(cleanupDatePicker)
        .then(complete, complete);
};
testThreeEventsAndRemove = function (complete) {
    // Programmatic date assignment updates all three fields without raising
    // change events, both while listeners are attached and after removal.
    var settings = {
        calendar: 'GregorianCalendar',
        current: new Date(2011, 3, 1)
    };
    createPickerWithAppend(settings)
        .then(function (picker) {
            attachEventListeners(picker);
            verifyDate(picker, { day: '01', month: '04', year: '2011' });
            // change all 3
            picker.winControl.current = 'June 03, 2013';
            verifyDate(picker, { day: '03', month: '06', year: '2013' });
            LiveUnit.Assert.areEqual(0, changeHit);
            // remove event listeners, verify events still not received
            removeEventListeners(picker);
            picker.winControl.current = new Date(2014, 6, 4);
            verifyDate(picker, { day: '04', month: '07', year: '2014' });
            LiveUnit.Assert.areEqual(0, changeHit);
        })
        .then(null, unhandledTestError)
        .then(cleanupDatePicker)
        .then(complete, complete);
};
testHijriCalender = function (complete) {
//BugID: 446784
// Creates a Hijri-calendar DatePicker and mirrors it with a WinRT
// Windows.Globalization Calendar: the mirror is built as Gregorian
// 2011-10-25 and converted to Hijri, then checked against the control via
// setBackEnd/setUI (project helpers -- presumably they sync the expected
// values with the control; confirm at their definitions). Finally both the
// mirror and the UI are advanced by one month.
// WinRT-only: falls through to complete() when glob APIs are unavailable.
if (isWinRTEnabled()) {
var calendarType = 'HijriCalendar';
var cleanup;
createPickerWithAppend({
calendar: calendarType,
current: new Date(2011, 9, 25)
}).then(function (picker) {
datePicker = picker;
cleanup = addGlobChangeEvent(picker);
var c = new glob.Calendar();
// Mirror calendar: same languages/clock as the default, but Gregorian,
// so the known Gregorian date can be set before converting to Hijri.
var myCalendar = new glob.Calendar(c.languages, "GregorianCalendar", c.getClock());
myCalendar.year = 2011;
myCalendar.month = 10;
myCalendar.day = 25;
myCalendar.changeCalendarSystem("HijriCalendar");
setBackEnd(myCalendar);
setUI(myCalendar, calendarType);
var selectControls = getControls(picker);
LiveUnit.Assert.areEqual(getExpectedOrder(calendarType), getActualUIOrder(), "Incorrect UI order");
checkMonthNames(selectControls, calendarType);
// Advance both the mirror and the month <select> by one, then re-sync.
myCalendar.addMonths(1);
++selectControls.monthSelect.selectedIndex;
setBackEnd(myCalendar);
setUI(myCalendar, calendarType);
fireOnchange(selectControls.monthSelect);
})
.then(null, unhandledTestError)
.then(cleanup) //placed after the error handler to make sure it gets removed even if the test case failed
.then(cleanupDatePicker)
.then(complete, unhandledTestError);
}
else
complete();
};
testUpdateDatePickerToHijriCalendar = function (complete) {
// Switches a live Gregorian picker to the Hijri calendar at runtime and
// verifies the select-field order updates to the Hijri layout. The local
// glob.Calendar mirror is converted too, though only the UI order is
// asserted here.
// WinRT-only: falls through to complete() when glob APIs are unavailable.
if (isWinRTEnabled()) {
var cleanup;
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: new Date(2011, 9, 25)
}).then(function (picker) {
datePicker = picker;
cleanup = addGlobChangeEvent(picker);
var c = new glob.Calendar();
var calendarType = "GregorianCalendar";
var myCalendar = new glob.Calendar(c.languages, calendarType, c.getClock());
var selectControls = getControls(picker);
LiveUnit.Assert.areEqual(getExpectedOrder(calendarType), getActualUIOrder(), "Incorrect UI order for GregorianCalendar");
// Flip the control's calendar system and check the layout follows.
calendarType = "HijriCalendar";
picker.winControl.calendar = calendarType;
myCalendar.changeCalendarSystem(calendarType);
LiveUnit.Assert.areEqual(getExpectedOrder(calendarType), getActualUIOrder(), "Incorrect UI order for HijriCalendar");
})
.then(null, unhandledTestError)
.then(cleanup) //placed after the error handler to make sure it gets removed even if the test case failed
.then(cleanupDatePicker)
.then(complete, unhandledTestError);
}
else
complete();
};
testHijriCalenderLeapDays = function (complete) {
// Checks the day count of all 12 Hijri months. The expected count is
// modeled as totalNumOfDays - (i % 2), i.e. odd months 30 days and even
// months 29; in a leap year the 12th month (i === 11) has 30 days, which
// the loop models by bumping i to an even value before the assert.
// WinRT-only: falls through to complete() when glob APIs are unavailable.
if (isWinRTEnabled()) {
var calendarType = 'HijriCalendar';
createPickerWithAppend({
calendar: calendarType
}).then(function (picker) {
datePicker = picker;
var c = new glob.Calendar();
//var calendar = picker['data-win-control'].calendar;
var myCalendar = new glob.Calendar(c.languages, calendarType, c.getClock());
var totalNumOfDays = 30;
var selectControls = getControls(picker);
checkMonthNames(selectControls, calendarType);
LiveUnit.Assert.areEqual(getExpectedOrder(calendarType), getActualUIOrder(), "Incorrect UI order");
for (var i = 0; i < 12; i++) {
selectControls.monthSelect.selectedIndex = i;
fireOnchange(selectControls.monthSelect);
// Leap-year 12th month has 30 days: force i even so the formula yields 30.
if (i === 11 && hijriLeapYear(selectControls.yearSelect.value))
i++;
LiveUnit.Assert.areEqual(totalNumOfDays - (i % 2), selectControls.dateSelect.length, "Error in the number of days in " + selectControls.monthSelect.value + " the Hijri calendar");
}
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, unhandledTestError);
}
else
complete();
};
testHijriCalenderLeapDaysInLeapYears = function (complete) {
// Same day-count walk as testHijriCalenderLeapDays, but with the year
// pinned to 1431 -- a known Hijri leap year -- so the leap branch (12th
// month having 30 days) is guaranteed to be exercised.
// WinRT-only: falls through to complete() when glob APIs are unavailable.
if (isWinRTEnabled()) {
var calendarType = 'HijriCalendar';
createPickerWithAppend({
calendar: calendarType
}).then(function (picker) {
datePicker = picker;
var c = new glob.Calendar();
var calendar = calendarType;
var myCalendar = new glob.Calendar(c.languages, calendar, c.getClock());
var totalNumOfDays = 30;
var selectControls = getControls(picker);
LiveUnit.Assert.areEqual(getExpectedOrder(calendar), getActualUIOrder(), "Incorrect UI order");
selectControls.yearSelect.value = "1431"; //example of Leap year
fireOnchange(selectControls.yearSelect);
checkMonthNames(selectControls, calendar);
for (var i = 0; i < 12; i++) {
selectControls.monthSelect.selectedIndex = i;
fireOnchange(selectControls.monthSelect);
// Leap-year 12th month has 30 days: force i even so the formula yields 30.
if (i === 11 && hijriLeapYear(selectControls.yearSelect.value))
i++;
LiveUnit.Assert.areEqual(totalNumOfDays - (i % 2), selectControls.dateSelect.length, "Error in the number of days in " + selectControls.monthSelect.value + " the Hijri calendar");
}
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, unhandledTestError);
}
else
complete();
};
testSpecialHebrewCalendarScenario = function (complete) {
// Regression scenario for the Hebrew calendar: jump the year select to
// Hebrew 5784 (Gregorian 2024), select day 29 (index 28), switch to the
// 4th month (index 3), and verify only 29 days are populated.
// NOTE(review): the yearSelect index arithmetic assumes the picker
// initially shows the current system year -- confirm against the
// control's default behavior.
// WinRT-only: falls through to complete() when glob APIs are unavailable.
if (isWinRTEnabled()) {
var calendarType = 'HebrewCalendar';
createPickerWithAppend({
calendar: calendarType
}).then(function (picker) {
datePicker = picker;
var calendar = calendarType;
var selectControls = getControls(picker);
LiveUnit.Assert.areEqual(getExpectedOrder(calendar), getActualUIOrder(), "Incorrect UI order");
var d = new Date();
var currentYear = d.getFullYear();
//5784 in hebrew === 2024 in Gregorian
selectControls.yearSelect.selectedIndex = selectControls.yearSelect.selectedIndex + (2024 - currentYear);
fireOnchange(selectControls.yearSelect);
selectControls.dateSelect.selectedIndex = 28;
fireOnchange(selectControls.dateSelect);
selectControls.monthSelect.selectedIndex = 3;
fireOnchange(selectControls.monthSelect);
var max = selectControls.dateSelect.length;
LiveUnit.Assert.areEqual(29, max, "Wrong number of days populated into dateControl");
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, unhandledTestError);
}
else
complete();
};
testSpecialHebrewCalendar = function (complete) {
// Walks two Hebrew years -- 5784 (leap, 13 months) and 5785 (non-leap,
// 12 months) -- verifying the month count per year and, via
// validateMonth, the day count of every month. A local glob.Calendar
// mirror tracks the year so the select-index arithmetic stays in sync
// with the control's internal calendar.
// WinRT-only: falls through to complete() when glob APIs are unavailable.
if (isWinRTEnabled()) {
var calendarType = 'HebrewCalendar';
createPickerWithAppend({
calendar: calendarType,
}).then(function (picker) {
datePicker = picker;
var c = new glob.Calendar();
var calendar = calendarType;
var myCalendar = new glob.Calendar(c.languages, calendar, c.getClock());
var totalNumOfDays = 30;
var years = ["5784", "5785"]; //first is a leap year and second is not leap year
var gregorianYears = [2024, 2025]; //Gregorian Years
var numOfmonths;
var selectControls = getControls(picker);
LiveUnit.Assert.areEqual(getExpectedOrder(calendar), getActualUIOrder(), "Incorrect UI order");
for (var j = 0; j < years.length; j++) {
var d = new Date();
var currentYear = d.getFullYear();
// Move the year select by the delta between the target Hebrew year and
// the mirror's current year.
selectControls.yearSelect.selectedIndex = selectControls.yearSelect.selectedIndex + (parseInt(years[j]) - myCalendar.year); //selectControls.yearSelect.selectedIndex + (gregorianYears[j] - currentYear);
myCalendar.year = parseInt(years[j]); // Update local calendar to stay in sync with DatePicker's private calendar
fireOnchange(selectControls.yearSelect);
checkMonthNames(selectControls, calendar, years[j]);
numOfmonths = 12;
var isLeap = isHebrewLeapYear(years[j]);
if (isLeap)
numOfmonths = 13;
LiveUnit.Assert.areEqual(numOfmonths, selectControls.monthSelect.length, "incorrect number of months in year " + years[j]);
for (var i = 0; i < numOfmonths; i++) {
selectControls.monthSelect.selectedIndex = i;
fireOnchange(selectControls.monthSelect);
validateMonth(i + 1, isLeap, selectControls.dateSelect.length);
}
}
LiveUnit.Assert.areEqual(numOfmonths, selectControls.monthSelect.length, "Error in the number of months in the Hebrew calendar");
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, unhandledTestError);
}
else
complete();
};
testNonLeapYearInGregorianCalendar = function (complete) {
    // February 1900 must show 28 days: 1900 is divisible by 100 but not by
    // 400, so it is NOT a Gregorian leap year.
    // (Removed unused 'cleanup', 'c', and 'myCalendar' locals from the
    // original -- none were referenced.)
    // WinRT-only: falls through to complete() when glob APIs are unavailable.
    if (isWinRTEnabled()) {
        var calendarType = 'GregorianCalendar';
        createPickerWithAppend({
            calendar: calendarType,
            minYear: 1899,
            current: new Date(1900, 3, 7)
        }).then(function (picker) {
            datePicker = picker;
            var calendar = calendarType;
            var selectControls = getControls(picker);
            LiveUnit.Assert.areEqual(getExpectedOrder(calendar), getActualUIOrder(), "Incorrect UI order");
            checkMonthNames(selectControls, calendar);
            // Select February (index 1) and count the populated days.
            selectControls.monthSelect.selectedIndex = 1;
            fireOnchange(selectControls.monthSelect);
            var totalNumofDays = 28;
            LiveUnit.Assert.areEqual(totalNumofDays, selectControls.dateSelect.length, "Error in the number of days in a non leap year");
        })
        .then(null, unhandledTestError)
        .then(cleanupDatePicker)
        .then(complete, unhandledTestError);
    }
    else
        complete();
};
testLeapYearInGregorianCalendar = function (complete) {
    // February 2000 must show 29 days: 2000 is divisible by 400 and so IS a
    // Gregorian leap year.
    // Fixes: the failure message wrongly said "non leap year"; also removed
    // unused 'cleanup', 'c', and 'myCalendar' locals from the original.
    // WinRT-only: falls through to complete() when glob APIs are unavailable.
    if (isWinRTEnabled()) {
        var calendarType = 'GregorianCalendar';
        createPickerWithAppend({
            calendar: 'GregorianCalendar',
            current: new Date(2000, 3, 7)
        }).then(function (picker) {
            datePicker = picker;
            var calendar = calendarType;
            var selectControls = getControls(picker);
            checkMonthNames(selectControls, calendar);
            // Select February (index 1) and count the populated days.
            selectControls.monthSelect.selectedIndex = 1;
            fireOnchange(selectControls.monthSelect);
            var totalNumofDays = 29;
            LiveUnit.Assert.areEqual(totalNumofDays, selectControls.dateSelect.length, "Error in the number of days in a leap year");
        })
        .then(null, unhandledTestError)
        .then(cleanupDatePicker)
        .then(complete, unhandledTestError);
    }
    else
        complete();
};
testNumOfDaysInKoreanCalendar = function (complete) {
    //WinRT bug
    // Walks 5 consecutive years x 12 months in the Korean calendar and
    // checks each month's populated day count against the Gregorian
    // equivalent year (via convertFromKoreanToGreogrianYear).
    // (Removed unused 'cleanup', 'c', and 'myCalendar' locals from the
    // original -- none were referenced.)
    if (isWinRTEnabled()) {
        var calendarType = 'KoreanCalendar';
        createPickerWithAppend({
            calendar: calendarType
        }).then(function (picker) {
            datePicker = picker;
            var calendar = calendarType;
            var selectControls = getControls(picker);
            LiveUnit.Assert.areEqual(getExpectedOrder(calendar), getActualUIOrder(), "Incorrect UI order");
            checkMonthNames(selectControls, calendar);
            for (var j = 0; j < 5; j++) {
                for (var i = 0; i < 12; i++) {
                    selectControls.monthSelect.selectedIndex = i;
                    fireOnchange(selectControls.monthSelect);
                    var numOfdaysInMonth = daysInMonth(convertFromKoreanToGreogrianYear(parseInt(selectControls.yearSelect.value)), i);
                    LiveUnit.Assert.areEqual(numOfdaysInMonth, selectControls.dateSelect.length, "Error in the number of days in Korean Calendar month " + (i + 1) + " and year " + selectControls.yearSelect.value);
                }
                selectControls.yearSelect.selectedIndex++; //to make sure we included a leap year
                fireOnchange(selectControls.yearSelect);
            }
        })
        .then(null, unhandledTestError)
        .then(cleanupDatePicker)
        .then(complete, unhandledTestError);
    }
    else
        complete();
};
testKnownDayInThai = function (complete) {
    //BugID: 449918
    // Thai solar years run 543 ahead of Gregorian, so Gregorian 2001-01-01
    // must display as Thai year 2544 with the first month and day selected.
    // (Removed an unused 'cleanup' local from the original.)
    if (isWinRTEnabled()) {
        var calendarType = 'ThaiCalendar';
        createPickerWithAppend({
            calendar: calendarType,
            minYear: 2000,
            current: new Date(2001, 0, 1)
        }).then(function (picker) {
            datePicker = picker;
            var calendar = calendarType;
            var selectControls = getControls(picker);
            checkMonthNames(selectControls, calendar);
            LiveUnit.Assert.areEqual(getExpectedOrder(calendar), getActualUIOrder(), "Incorrect UI order");
            var year = numInYear(selectControls.yearSelect.value);
            LiveUnit.Assert.areEqual('2544', year, "Error in known year in Thai");
            LiveUnit.Assert.areEqual(0, selectControls.monthSelect.selectedIndex, "Error in known month in Thai");
            LiveUnit.Assert.areEqual(0, selectControls.dateSelect.selectedIndex, "Error in known day in Thai");
        })
        .then(null, unhandledTestError)
        .then(cleanupDatePicker)
        .then(complete, unhandledTestError);
    }
    else
        complete();
};
testNumOfDaysInThaiCalendar = function (complete) {
    //BugID: 651414
    // Walks 5 consecutive years x 12 months in the Thai calendar and checks
    // each month's populated day count against the Gregorian equivalent
    // year (via convertFromThaiToGreogrianYear).
    // (Removed an unused 'cleanup' local from the original.)
    if (isWinRTEnabled()) {
        var calendarType = 'ThaiCalendar';
        createPickerWithAppend({
            calendar: calendarType
        }).then(function (picker) {
            datePicker = picker;
            var calendar = calendarType;
            var selectControls = getControls(picker);
            LiveUnit.Assert.areEqual(getExpectedOrder(calendar), getActualUIOrder(), "Incorrect UI order");
            for (var j = 0; j < 5; j++) {
                checkMonthNames(selectControls, calendar, selectControls.yearSelect.value);
                for (var i = 0; i < 12; i++) {
                    selectControls.monthSelect.selectedIndex = i;
                    fireOnchange(selectControls.monthSelect);
                    var numOfdaysInMonth = daysInMonth(convertFromThaiToGreogrianYear(parseInt(selectControls.yearSelect.value)), i);
                    LiveUnit.Assert.areEqual(numOfdaysInMonth, selectControls.dateSelect.length, "Error in the number of days in Thai Calendar month " + (i + 1) + " and year " + selectControls.yearSelect.value);
                }
                selectControls.yearSelect.selectedIndex++; //to make sure we included a leap year
                fireOnchange(selectControls.yearSelect);
            }
        })
        .then(null, unhandledTestError)
        .then(cleanupDatePicker)
        .then(complete, unhandledTestError);
    }
    else
        complete();
};
testJapaneseCalendarKnownDate = function (complete) {
    //BugID: 628192
    // Gregorian 2001-01-01 must display as Japanese-era year 13 (Heisei)
    // with the first month and day selected.
    // (Removed an unused 'cleanup' local from the original.)
    if (isWinRTEnabled()) {
        var calendarType = 'JapaneseCalendar';
        createPickerWithAppend({
            calendar: calendarType,
            current: new Date(2001, 0, 1)
        }).then(function (picker) {
            datePicker = picker;
            var calendar = calendarType;
            var selectControls = getControls(picker);
            checkMonthNames(selectControls, calendar);
            LiveUnit.Assert.areEqual(getExpectedOrder(calendar), getActualUIOrder(), "Incorrect UI order");
            var year = numInYear(selectControls.yearSelect.value);
            LiveUnit.Assert.areEqual("13", year, "Error in known year in Japanese");
            LiveUnit.Assert.areEqual(0, selectControls.monthSelect.selectedIndex, "Error in known month in Japanese");
            LiveUnit.Assert.areEqual(0, selectControls.dateSelect.selectedIndex, "Error in known day in Japanese");
        })
        .then(null, unhandledTestError)
        .then(cleanupDatePicker)
        .then(complete, unhandledTestError);
    }
    else
        complete();
};
testJapaneseCalendarKnownBuggyDate = function (complete) {
    // Pins era-boundary behavior around January 1989: with current set to
    // 1989-01-07 the control is expected to populate only one month and
    // seven days -- presumably because the Showa era ends on Jan 7, 1989
    // (confirm against the WinRT JapaneseCalendar era data).
    // (Removed an unused 'cleanup' local from the original.)
    if (isWinRTEnabled()) {
        var calendarType = 'JapaneseCalendar';
        createPickerWithAppend({
            calendar: calendarType,
            current: new Date(1989, 0, 7)
        }).then(function (picker) {
            datePicker = picker;
            var calendar = calendarType;
            var selectControls = getControls(picker);
            LiveUnit.Assert.areEqual(getExpectedOrder(calendar), getActualUIOrder(), "Incorrect UI order");
            LiveUnit.Assert.areEqual(1, selectControls.monthSelect.length, "Error in number of months in Japanese Calendar");
            LiveUnit.Assert.areEqual(7, selectControls.dateSelect.length, "Error in number of days in Japanese Calendar");
        })
        .then(null, unhandledTestError)
        .then(cleanupDatePicker)
        .then(complete, unhandledTestError);
    }
    else
        complete();
};
testJapaneseCalendarKnownDate_temp = function (complete) {
    // Variant of testJapaneseCalendarKnownDate with an explicit Gregorian
    // minYear/maxYear window: 2001-01-01 must still display as Japanese-era
    // year 13 with the first month and day selected.
    // (Removed an unused 'cleanup' local from the original.)
    if (isWinRTEnabled()) {
        var calendarType = 'JapaneseCalendar';
        createPickerWithAppend({
            calendar: calendarType,
            current: new Date(2001, 0, 1),
            minYear: 1990,
            maxYear: 2010
        }).then(function (picker) {
            datePicker = picker;
            var calendar = calendarType;
            var selectControls = getControls(picker);
            checkMonthNames(selectControls, calendar);
            LiveUnit.Assert.areEqual(getExpectedOrder(calendar), getActualUIOrder(), "Incorrect UI order");
            var year = numInYear(selectControls.yearSelect.value);
            LiveUnit.Assert.areEqual("13", year, "Error in known year in Japanese");
            LiveUnit.Assert.areEqual(0, selectControls.monthSelect.selectedIndex, "Error in known month in Japanese");
            LiveUnit.Assert.areEqual(0, selectControls.dateSelect.selectedIndex, "Error in known day in Japanese");
        })
        .then(null, unhandledTestError)
        .then(cleanupDatePicker)
        .then(complete, unhandledTestError);
    }
    else
        complete();
};
testNumOfDaysInJapaneseCalendar = function (complete) {
    //WinRT bug
    // Walks 5 years x 12 months of the Japanese calendar, keeping a local
    // glob.Calendar mirror in step with the UI to derive each month's
    // expected day count.
    // NOTE(review): setBackEnd(myCalendar) appears to switch the mirror to
    // a Gregorian view so myCalendar.year feeds daysInMonth, after which
    // the system is converted back -- confirm at setBackEnd's definition.
    // (Removed an unused 'cleanup' local from the original.)
    if (isWinRTEnabled()) {
        var calendarType = 'JapaneseCalendar';
        createPickerWithAppend({
            calendar: calendarType
        }).then(function (picker) {
            datePicker = picker;
            var c = new glob.Calendar();
            var calendar = calendarType;
            var myCalendar = new glob.Calendar(c.languages, calendar, c.getClock());
            var selectControls = getControls(picker);
            LiveUnit.Assert.areEqual(getExpectedOrder(calendar), getActualUIOrder(), "Incorrect UI order");
            for (var j = 0; j < 5; j++) {
                // Rewind the mirror to month 1 of its current year.
                myCalendar.addMonths(-1 * myCalendar.month + 1);
                checkMonthNames(selectControls, calendar, selectControls.yearSelect.value);
                for (var i = 0; i < 12; i++) {
                    selectControls.monthSelect.selectedIndex = i;
                    fireOnchange(selectControls.monthSelect);
                    setBackEnd(myCalendar);
                    var numOfdaysInMonth = daysInMonth(myCalendar.year, i);
                    myCalendar.changeCalendarSystem(calendar);
                    if (i !== 1) //in order not to add an extra year by mistake
                        myCalendar.addMonths(1);
                    LiveUnit.Assert.areEqual(numOfdaysInMonth, selectControls.dateSelect.length, "Error in the number of days in Japanese Calendar month " + (i + 1) + " and year " + selectControls.yearSelect.value);
                }
                myCalendar.addYears(1);
                selectControls.yearSelect.selectedIndex++; //to make sure we included a leap year
                fireOnchange(selectControls.yearSelect);
            }
        })
        .then(null, unhandledTestError)
        .then(cleanupDatePicker)
        .then(complete, unhandledTestError);
    }
    else
        complete();
};
testMinAndMaxYearGlob = function (complete) {
    //BugID: 450489
    // minYear/maxYear are supplied in Gregorian years (2009/2040); on a
    // Hijri picker the year list must span the converted Hijri years
    // 1430..1462 instead.
    // (Removed an unused 'cleanup' local from the original.)
    if (isWinRTEnabled()) {
        var calendarType = 'HijriCalendar';
        var minYearValue = 1430;
        var maxYearValue = 1462;
        createPickerWithAppend({
            calendar: calendarType,
            minYear: 2009,
            maxYear: 2040
        }).then(function (picker) {
            datePicker = picker;
            var calendar = calendarType;
            var selectControls = getControls(picker);
            LiveUnit.Assert.areEqual(getExpectedOrder(calendar), getActualUIOrder(), "Incorrect UI order");
            checkMonthNames(selectControls, calendar);
            LiveUnit.Assert.areEqual(minYearValue + '', selectControls.yearSelect[0].value, "Error in the specified minimum year");
            LiveUnit.Assert.areEqual(maxYearValue + '', selectControls.yearSelect[selectControls.yearSelect.length - 1].value, "Error in the specified maximum year");
        })
        .then(null, unhandledTestError)
        .then(cleanupDatePicker)
        .then(complete, unhandledTestError);
    }
    else
        complete();
};
testJulianCalendarKnownDay = function (complete) {
    //not working because of a problem with WinRT Glob
    // Gregorian 2001-01-01 corresponds to Julian 2000-12-19, so the picker
    // must show year "2000", month index 11 (December) and day index 18
    // (the 19th).
    // (Removed an unused 'cleanup' local from the original.)
    if (isWinRTEnabled()) {
        var calendarType = 'JulianCalendar';
        createPickerWithAppend({
            calendar: calendarType,
            current: new Date(2001, 0, 1)
        }).then(function (picker) {
            datePicker = picker;
            var calendar = calendarType;
            var selectControls = getControls(picker);
            checkMonthNames(selectControls, calendar);
            LiveUnit.Assert.areEqual(getExpectedOrder(calendar), getActualUIOrder(), "Incorrect UI order");
            var year = numInYear(selectControls.yearSelect.value);
            LiveUnit.Assert.areEqual("2000", year, "Error in known year in Julian");
            LiveUnit.Assert.areEqual(11, selectControls.monthSelect.selectedIndex, "Error in known month in Julian");
            LiveUnit.Assert.areEqual(18, selectControls.dateSelect.selectedIndex, "Error in known day in Julian");
        })
        .then(null, unhandledTestError)
        .then(cleanupDatePicker)
        .then(complete, unhandledTestError);
    }
    else
        complete();
};
testJulianCalendarNumOfDays = function (complete) {
    //not working because of a problem with WinRT Glob
    // Checks month lengths for a set of years chosen to cover every
    // Julian-vs-Gregorian leap-year disagreement.
    // Fixes: the failure message read "Correct number of days..." which is
    // misleading on failure; also removed an unused 'cleanup' local.
    if (isWinRTEnabled()) {
        var calendarType = 'JulianCalendar';
        createPickerWithAppend({
            calendar: calendarType,
            minYear: 1889,
            maxYear: 2050
        }).then(function (picker) {
            datePicker = picker;
            //1900 is a leap year in Julian Calendar but not a leap year in Gregorian Calendar
            //2000 is a leap year in both Gregorian and Julian Calendars due to the exception fo divisible by 400
            //2002 is a non leap year in both Gregorian and Julian Calendars
            //2004 is a leap year in both Gregorian and Julian Calendars with no exceptions of divisible by 400
            var years = [1900, 2000, 2002, 2004];
            var calendar = calendarType;
            var selectControls = getControls(picker);
            for (var i = 0; i < years.length; i++) {
                selectControls.yearSelect.value = years[i];
                fireOnchange(selectControls.yearSelect);
                checkMonthNames(selectControls, calendar, years[i]);
                LiveUnit.Assert.areEqual(getExpectedOrder(calendar), getActualUIOrder(), "Incorrect UI order");
                var isLeap = isJulianLeapYear(years[i]);
                for (var j = 0; j < 12; j++) {
                    selectControls.monthSelect.selectedIndex = j;
                    fireOnchange(selectControls.monthSelect);
                    var year = years[i];
                    if (isLeap) {
                        year = 2000; //to make sure that daysInMonth will work for Julian too to account for the case that the year is divisible by 100
                    }
                    var numOfDays = daysInMonth(year, j);
                    LiveUnit.Assert.areEqual(numOfDays, selectControls.dateSelect.length, "Error in the number of days in month " + (j + 1) + " in year " + years[i]);
                }
            }
        })
        .then(null, unhandledTestError)
        .then(cleanupDatePicker)
        .then(complete, unhandledTestError);
    }
    else
        complete();
};
testTaiwanKnownDate = function (complete) {
    //BugID: 628192
    //BugID: 449809
    // Gregorian 2001-01-01 must display as Taiwan (Minguo) year 90 with the
    // first month and day selected.
    // (Removed an unused 'cleanup' local from the original.)
    if (isWinRTEnabled()) {
        var calendarType = 'TaiwanCalendar';
        createPickerWithAppend({
            calendar: calendarType,
            current: new Date(2001, 0, 1)
        }).then(function (picker) {
            datePicker = picker;
            var calendar = calendarType;
            var selectControls = getControls(picker);
            LiveUnit.Assert.areEqual(getExpectedOrder(calendar), getActualUIOrder(), "Incorrect UI order");
            var year = numInYear(selectControls.yearSelect.value);
            LiveUnit.Assert.areEqual("90", year, "Error in known year in Taiwan Calendar");
            LiveUnit.Assert.areEqual(0, selectControls.monthSelect.selectedIndex, "Error in known month in Taiwan Calendar");
            LiveUnit.Assert.areEqual(0, selectControls.dateSelect.selectedIndex, "Error in known day in Taiwan Calendar");
        })
        .then(null, unhandledTestError)
        .then(cleanupDatePicker)
        .then(complete, unhandledTestError);
    }
    else
        complete();
};
testTaiwanKnownDate_temp = function (complete) {
    // Variant of testTaiwanKnownDate with an explicit Gregorian
    // minYear/maxYear window: 2001-01-01 must still display as Taiwan
    // (Minguo) year 90 with the first month and day selected.
    // (Removed an unused 'cleanup' local from the original.)
    if (isWinRTEnabled()) {
        var calendarType = 'TaiwanCalendar';
        createPickerWithAppend({
            calendar: calendarType,
            current: new Date(2001, 0, 1),
            minYear: 1990,
            maxYear: 2011
        }).then(function (picker) {
            datePicker = picker;
            var calendar = calendarType;
            var selectControls = getControls(picker);
            LiveUnit.Assert.areEqual(getExpectedOrder(calendar), getActualUIOrder(), "Incorrect UI order");
            var year = numInYear(selectControls.yearSelect.value);
            LiveUnit.Assert.areEqual("90", year, "Error in known year in Taiwan Calendar");
            LiveUnit.Assert.areEqual(0, selectControls.monthSelect.selectedIndex, "Error in known month in Taiwan Calendar");
            LiveUnit.Assert.areEqual(0, selectControls.dateSelect.selectedIndex, "Error in known day in Taiwan Calendar");
        })
        .then(null, unhandledTestError)
        .then(cleanupDatePicker)
        .then(complete, unhandledTestError);
    }
    else
        complete();
};
testTaiwanCalendarNumOfDays = function (complete) {
    //BugID: 628192
    // Walks 5 consecutive years x 12 months in the Taiwan calendar and
    // checks each month's populated day count against the Gregorian
    // equivalent year (via convertFromTaiwanToGreogrianYear).
    // Fixes: the failure message wrongly said "Korean Calendar"; also
    // removed unused 'cleanup', 'c', and 'myCalendar' locals.
    if (isWinRTEnabled()) {
        var calendarType = 'TaiwanCalendar';
        createPickerWithAppend({
            calendar: calendarType
        }).then(function (picker) {
            datePicker = picker;
            var calendar = calendarType;
            var selectControls = getControls(picker);
            LiveUnit.Assert.areEqual(getExpectedOrder(calendar), getActualUIOrder(), "Incorrect UI order");
            for (var j = 0; j < 5; j++) {
                checkMonthNames(selectControls, calendar, selectControls.yearSelect.value);
                for (var i = 0; i < 12; i++) {
                    selectControls.monthSelect.selectedIndex = i;
                    fireOnchange(selectControls.monthSelect);
                    var numOfdaysInMonth = daysInMonth(convertFromTaiwanToGreogrianYear(parseInt(selectControls.yearSelect.value)), i);
                    LiveUnit.Assert.areEqual(numOfdaysInMonth, selectControls.dateSelect.length, "Error in the number of days in Taiwan Calendar month " + (i + 1) + " and year " + selectControls.yearSelect.value);
                }
                selectControls.yearSelect.selectedIndex++; //to make sure we included a leap year
                fireOnchange(selectControls.yearSelect);
            }
        })
        .then(null, unhandledTestError)
        .then(cleanupDatePicker)
        .then(complete, unhandledTestError);
    }
    else
        complete();
};
testTaiwanCalendarNumOfDays_temp = function (complete) {
    // Variant of testTaiwanCalendarNumOfDays with an explicit Gregorian
    // minYear/maxYear window.
    // Fixes: the failure message wrongly said "Korean Calendar"; also
    // removed unused 'cleanup', 'c', and 'myCalendar' locals.
    if (isWinRTEnabled()) {
        var calendarType = 'TaiwanCalendar';
        createPickerWithAppend({
            calendar: calendarType,
            maxYear: 2020,
            minYear: 1990
        }).then(function (picker) {
            datePicker = picker;
            var calendar = calendarType;
            var selectControls = getControls(picker);
            LiveUnit.Assert.areEqual(getExpectedOrder(calendar), getActualUIOrder(), "Incorrect UI order");
            for (var j = 0; j < 5; j++) {
                checkMonthNames(selectControls, calendar, selectControls.yearSelect.value);
                for (var i = 0; i < 12; i++) {
                    selectControls.monthSelect.selectedIndex = i;
                    fireOnchange(selectControls.monthSelect);
                    var numOfdaysInMonth = daysInMonth(convertFromTaiwanToGreogrianYear(parseInt(selectControls.yearSelect.value)), i);
                    LiveUnit.Assert.areEqual(numOfdaysInMonth, selectControls.dateSelect.length, "Error in the number of days in Taiwan Calendar month " + (i + 1) + " and year " + selectControls.yearSelect.value);
                }
                selectControls.yearSelect.selectedIndex++; //to make sure we included a leap year
                fireOnchange(selectControls.yearSelect);
            }
        })
        .then(null, unhandledTestError)
        .then(cleanupDatePicker)
        .then(complete, unhandledTestError);
    }
    else
        complete();
};
testUmAlQuraCalenderLeapDays = function (complete) {
//BugID: 449812
if (isWinRTEnabled()) {
var calendarType = 'UmAlQuraCalendar';
createPickerWithAppend({
calendar: calendarType
}).then(function (picker) {
datePicker = picker;
var c = new glob.Calendar();
var calendar = calendarType;
var myCalendar = new glob.Calendar(c.languages, calendar, c.getClock());
var selectControls = getControls(picker);
LiveUnit.Assert.areEqual(getExpectedOrder(calendar), getActualUIOrder(), "Incorrect UI order");
checkMonthNames(selectControls, calendar);
for (var i = 0; i < 12; i++) {
selectControls.monthSelect.selectedIndex = i;
fireOnchange(selectControls.monthSelect);
var numOfDays = selectControls.dateSelect.length;
if (numOfDays !== 29 && numOfDays !== 30)
LiveUnit.Assert.areEqual(30, numOfDays, "Error in the number of days in " + selectControls.monthSelect.value + " the UmAlQura calendar");
}
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, unhandledTestError);
}
else
complete();
};
testDateWithObviousFormat = function (complete) {
//BugID: 538276
if (isWinRTEnabled()) {
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: 'April 01, 2011',
yearPattern: "{year.full}",
monthPattern: "{month.integer(2)}",
datePattern: "{day.integer(2)}",
}).then(function (picker) {
verifyDatePickerContent(picker, { day: '01', month: '04', year: '2011' });
attachEventListeners(picker);
// change the day
picker.winControl.current = 'April 02, 2011';
verifyDatePickerContent(picker, { day: '02', month: '04', year: '2011' });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
}
else {
complete();
}
};
testDateWithAbbreviatedNameFormat = function (complete) {
//BugID: 538276
if (isWinRTEnabled()) {
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: 'December 22, 2011',
yearPattern: "{year.abbreviated(2)}",
monthPattern: "{month.abbreviated(3)} .",
datePattern: "{dayofweek.abbreviated(4)} .",
}).then(function (picker) {
verifyDatePickerContent(picker, { day: 'Thu .', month: 'Dec .', year: '11' });
picker.winControl.current = 'February 02, 2005';
verifyDatePickerContent(picker, { day: 'Wed .', month: 'Feb .', year: '05' });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
}
else {
complete();
}
};
testDateWithAbbreviatedFormat = function (complete) {
//BugID: 538276
if (isWinRTEnabled()) {
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: 'April 05, 2011',
yearPattern: "year {year.full}",
monthPattern: "month {month.full}",
datePattern: "day {dayofweek.solo.full}",
}).then(function (picker) {
verifyDatePickerContent(picker, { day: 'day Tuesday', month: 'month April', year: 'year 2011' });
// change the day
picker.winControl.current = 'March 25, 2012';
verifyDatePickerContent(picker, { day: 'day Sunday', month: 'month March', year: 'year 2012' });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
}
else {
complete();
}
};
testDateWithSoloFormats = function (complete) {
//BugID: 628192
//BugID: 538276
if (isWinRTEnabled()) {
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: 'May 30, 1999',
yearPattern: "example {year.abbreviated(2)}",
monthPattern: "month {month.solo.full}",
datePattern: "day {dayofweek.solo.full}",
}).then(function (picker) {
verifyDatePickerContent(picker, { day: 'day Sunday', month: 'month May', year: 'example 99' });
// change the day
picker.winControl.current = 'July 26, 2019';
verifyDatePickerContent(picker, { day: 'day Friday', month: 'month July', year: 'example 19' });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
}
else {
complete();
}
};
testDateWithSoloFormats_temp = function (complete) {
if (isWinRTEnabled()) {
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: 'May 30, 1999',
minYear: 1990,
maxyYear: 2011,
yearPattern: "example {year.abbreviated(2)}",
monthPattern: "month {month.solo.full}",
datePattern: "day {dayofweek.solo.full}",
}).then(function (picker) {
verifyDatePickerContent(picker, { day: 'day Sunday', month: 'month May', year: 'example 99' });
// change the day
picker.winControl.current = 'July 26, 2019';
verifyDatePickerContent(picker, { day: 'day Friday', month: 'month July', year: 'example 19' });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
}
else {
complete();
}
};
testDateWithSoloAbbreviatedFormats = function (complete) {
//BugID: 628192
//BugID: 538276
if (isWinRTEnabled()) {
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: 'May 30, 1999',
yearPattern: "example {year.abbreviated(2)}",
monthPattern: "month {month.solo.abbreviated(3)}",
datePattern: "day {dayofweek.solo.abbreviated(2)}",
}).then(function (picker) {
verifyDatePickerContent(picker, { day: 'day Su', month: 'month May', year: 'example 99' });
// change the day
picker.winControl.current = 'July 26, 2019';
verifyDatePickerContent(picker, { day: 'day Fr', month: 'month Jul', year: 'example 19' });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
}
else {
complete();
}
};
testDateWithSoloAbbreviatedFormats_temp = function (complete) {
if (isWinRTEnabled()) {
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: 'May 30, 1999',
minYear: 1990,
maxYear: 2020,
yearPattern: "example {year.abbreviated(2)}",
monthPattern: "month {month.solo.abbreviated(3)}",
datePattern: "day {dayofweek.solo.abbreviated(2)}",
}).then(function (picker) {
verifyDatePickerContent(picker, { day: 'day Su', month: 'month May', year: 'example 99' });
// change the day
picker.winControl.current = 'July 26, 2019';
verifyDatePickerContent(picker, { day: 'day Fr', month: 'month Jul', year: 'example 19' });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
}
else {
complete();
}
};
testCreateMutipleDatePickersOneWithPatternAndOneWithout = function (complete) {
//BugID: 628192
//BugID: 538276
if (isWinRTEnabled()) {
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: 'May 30, 1999',
yearPattern: "example {year.abbreviated(2)}",
monthPattern: "month {month.solo.abbreviated(3)}",
datePattern: "day {dayofweek.solo.abbreviated(2)}",
}).then(function (picker) {
verifyDatePickerContent(picker, { day: 'day Su', month: 'month May', year: 'example 99' });
// change the day
picker.winControl.current = 'July 26, 2019';
verifyDatePickerContent(picker, { day: 'day Fr', month: 'month Jul', year: 'example 19' });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(function () {
createPickerWithAppend({
current: 'October 30, 2011',
calendar: 'GregorianCalendar',
}).then(function (picker) {
verifyDatePickerContent(picker, { day: '30', month: 'October', year: '2011' });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker);
})
.then(complete, complete);
}
else {
complete();
}
};
        xtestVerifyCSSVerticalLayout = function (complete) {
            // Disabled (x-prefix) pending bug #555425. Injects a stylesheet
            // rule that forces the picker's children to stack vertically, then
            // asserts each child sits below the previous one.
            /* bug #555425, test passes in IE and wwahost if you comment out these lines in the ui-dark.css file:
            .win-datepicker {
            display: -ms-inline-box;
            height: auto;
            width: auto;
            }
            when bug #555425 is fixed, similar test case needs to be added to timepicker-decl.js
            */
            var targetStyleSheet = null;
            var newRuleIndex = -1;
            createPickerWithAppend().then(function (picker:any) {
                /* add this rule so that any subelements starting with 'win-datepicker' get the new style to make them layout vertical
                .vertAlwaysDate *[class^="win-datepicker"] {
                display:block;
                float:none;
                }
                */
                var countStyleSheets = document.styleSheets.length;
                // append new stylesheet to head
                targetStyleSheet = document.createElement('STYLE');
                document.head.appendChild(targetStyleSheet);
                // verify new style sheet was added
                LiveUnit.Assert.isTrue(document.styleSheets.length === countStyleSheets + 1);
                countStyleSheets++;
                // append new rule to the style sheet
                // (re-fetch via document.styleSheets: the CSSStyleSheet object,
                // not the STYLE element, exposes cssRules/addRule)
                targetStyleSheet = document.styleSheets[countStyleSheets - 1];
                var countRules = targetStyleSheet.cssRules.length;
                // note: return from addRule is squirrely. When adding to sheet 0 it returns 1735 (perhaps index of *all* rules??), but cssRules.length is only 389 for the sheet 0
                targetStyleSheet.addRule(".vertAlwaysDate *[class^='win-datepicker']", "float: none; display: block; background-color:red;");
                newRuleIndex = targetStyleSheet.cssRules.length - 1;
                LiveUnit.Assert.isTrue(targetStyleSheet.cssRules.length === countRules + 1, "expected #cssRules + 1 after addRule; previous #cssRules=" + countRules + ", after addRule #cssRules=" + targetStyleSheet.cssRules.length);
                LiveUnit.Assert.areEqual(".vertAlwaysDate *[class^='win-datepicker'] { float: none; display: block; background-color: red; }", targetStyleSheet.cssRules[newRuleIndex].cssText);
                // add the vertAlwaysDate class to the datepicker control
                picker.className += " vertAlwaysDate";
                // verify the Y coordinates of child items are in increasing order by at least height of the child SELECT element
                var currTop, currHeight;
                var prevTop = -1;
                var height0 = picker.childNodes[0].offsetHeight;
                for (var x = 0; x < picker.childNodes.length; x++) {
                    currHeight = picker.childNodes[x].offsetHeight;
                    LiveUnit.Assert.isTrue(currHeight === height0, "expected height of current element === height of first element. Actual: currHeight=" + currHeight + ", height0=" + height0);
                    currTop = picker.childNodes[x].offsetTop;
                    LiveUnit.Assert.isTrue(currTop >= (prevTop + currHeight), "expecting currTop > (prevTop + currHeight). Actual: currTop=" + currTop + ", prevTop=" + prevTop + ", currHeight=" + currHeight);
                    prevTop = currTop;
                }
            })
            .then(null, unhandledTestError)
            .then(function () {
                // remove the injected rule even if assertions above failed
                if (targetStyleSheet) {
                    targetStyleSheet.removeRule(newRuleIndex);
                }
                cleanupDatePicker();
            })
            .then(complete, complete);
        };
testGetInformationCheckValidityOfControls = function (complete) {
if (!isWinRTEnabled()) {
var cleanup;
DatePicker.getInformation = getInformationJS;
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: new Date(2011, 11, 31)
}).then(function (picker) {
var selectControls = getControls(picker);
LiveUnit.Assert.areEqual("DYM", getActualUIOrder(), "Incorrect UI order");
LiveUnit.Assert.areEqual(6, selectControls.monthSelect.length, "checking the number of months in the custom calendar");
LiveUnit.Assert.areEqual("sixthMonth", selectControls.monthSelect.value, "checking the correctness of the selected month");
LiveUnit.Assert.areEqual("61", selectControls.dateSelect.value, "checking the correctness of the selected day");
LiveUnit.Assert.areEqual("2011", selectControls.yearSelect.value, "checking the correctness of the selected year");
for (var j = 0; j < 4; j++) { // to make sure that we will hit a leap year
setYear(picker, (parseInt(selectControls.yearSelect.value) + 1) + '');
for (var i = 1; i <= selectControls.monthSelect.length; i++) {
setMonth(picker, i);
LiveUnit.Assert.isTrue(checkDayCount(selectControls));
}
}
})
.then(null, unhandledTestError)
.then(cleanup) //placed after the error handler to make sure it gets removed even if the test case failed
.then(cleanupDatePicker)
.then(complete, unhandledTestError);
}
else {
complete();
}
};
testSpecialCustomCalendarCasesWithChangeEvents = function (complete) {
if (!isWinRTEnabled()) {
var cleanup;
DatePicker.getInformation = getInformationJS;
createPickerWithAppend({ current: new Date(2011, 11, 31) }).then(function (picker) {
var selectControls = getControls(picker);
LiveUnit.Assert.areEqual("DYM", getActualUIOrder(), "Incorrect UI order");
LiveUnit.Assert.areEqual(6, selectControls.monthSelect.length, "checking the number of months in the custom calendar");
LiveUnit.Assert.areEqual("sixthMonth", selectControls.monthSelect.value, "checking the correctness of the selected month");
LiveUnit.Assert.areEqual("61", selectControls.dateSelect.value, "checking the correctness of the selected day");
LiveUnit.Assert.areEqual("2011", selectControls.yearSelect.value, "checking the correctness of the selected year");
setMonth(picker, 1);
LiveUnit.Assert.areEqual(6, selectControls.monthSelect.length, "checking the number of months in the custom calendar");
LiveUnit.Assert.areEqual("firstMonth", selectControls.monthSelect.value, "checking the correctness of the selected month");
LiveUnit.Assert.areEqual("59", selectControls.dateSelect.value, "checking the correctness of the selected day");
LiveUnit.Assert.areEqual("2011", selectControls.yearSelect.value, "checking the correctness of the selected year");
setMonth(picker, 4);
setDate(picker, 62);
LiveUnit.Assert.areEqual(6, selectControls.monthSelect.length, "checking the number of months in the custom calendar");
LiveUnit.Assert.areEqual("fourthMonth", selectControls.monthSelect.value, "checking the correctness of the selected month");
LiveUnit.Assert.areEqual("62", selectControls.dateSelect.value, "checking the correctness of the selected day");
LiveUnit.Assert.areEqual("2011", selectControls.yearSelect.value, "checking the correctness of the selected year");
setYear(picker, "2012");
setMonth(picker, 5);
LiveUnit.Assert.areEqual(6, selectControls.monthSelect.length, "checking the number of months in the custom calendar");
LiveUnit.Assert.areEqual("fifthMonth", selectControls.monthSelect.value, "checking the correctness of the selected month");
LiveUnit.Assert.areEqual("61", selectControls.dateSelect.value, "checking the correctness of the selected day");
LiveUnit.Assert.areEqual("2012", selectControls.yearSelect.value, "checking the correctness of the selected year");
setMonth(picker, 1);
LiveUnit.Assert.areEqual(6, selectControls.monthSelect.length, "checking the number of months in the custom calendar");
LiveUnit.Assert.areEqual("firstMonth", selectControls.monthSelect.value, "checking the correctness of the selected month");
LiveUnit.Assert.areEqual("60", selectControls.dateSelect.value, "checking the correctness of the selected day");
LiveUnit.Assert.areEqual("2012", selectControls.yearSelect.value, "checking the correctness of the selected year");
})
.then(null, unhandledTestError)
.then(cleanup) //placed after the error handler to make sure it gets removed even if the test case failed
.then(cleanupDatePicker)
.then(complete, unhandledTestError);
}
else {
complete();
}
};
        testUmAlQuaraCalenderKnownDate = function (complete) {
            // Verifies a known Gregorian date (Oct 25, 2011) converts correctly
            // into the UmAlQura calendar, and that advancing one month keeps
            // the backend calendar object and the picker UI in sync.
            if (isWinRTEnabled()) {
                var calendarType = 'UmAlQuraCalendar';
                var cleanup;
                createPickerWithAppend({
                    calendar: calendarType,
                    current: new Date(2011, 9, 25)
                }).then(function (picker) {
                    datePicker = picker;
                    // Hook the globalization change event; presumably returns a
                    // disposer — see the .then(cleanup) below. TODO confirm.
                    cleanup = addGlobChangeEvent(picker);
                    var c = new glob.Calendar();
                    // Build a Gregorian calendar at the known date, then switch
                    // calendar systems so WinRT performs the conversion.
                    var myCalendar = new glob.Calendar(c.languages, "GregorianCalendar", c.getClock());
                    myCalendar.year = 2011;
                    myCalendar.month = 10; // October: glob.Calendar months are 1-based (JS Date above uses 0-based 9)
                    myCalendar.day = 25;
                    myCalendar.changeCalendarSystem("UmAlQuraCalendar");
                    setBackEnd(myCalendar);
                    setUI(myCalendar, calendarType);
                    var selectControls = getControls(picker);
                    LiveUnit.Assert.areEqual(getExpectedOrder(calendarType), getActualUIOrder(), "Incorrect UI order");
                    checkMonthNames(selectControls, calendarType);
                    // Advance one month on both the calendar object and the UI,
                    // then fire onchange so the control re-renders.
                    myCalendar.addMonths(1);
                    ++selectControls.monthSelect.selectedIndex;
                    setBackEnd(myCalendar);
                    setUI(myCalendar, calendarType);
                    fireOnchange(selectControls.monthSelect);
                })
                    .then(null, unhandledTestError)
                    .then(cleanup) //placed after the error handler to make sure it gets removed even if the test case failed
                    .then(cleanupDatePicker)
                    .then(complete, unhandledTestError);
            }
            else
                complete();
        };
testMaxAndMinInUmAlQuraCalender = function (complete) {
//BugID: 566405
//UmAlquara calendar supports up to 2029
if (isWinRTEnabled()) {
var calendarType = 'UmAlQuraCalendar';
createPickerWithAppend({
calendar: calendarType,
maxYear: 2200,
minYear: 623 //first year of umalquara is 1900
}).then(function (picker) {
datePicker = picker;
var selectControls = getControls(picker);
LiveUnit.Assert.areEqual("1318", selectControls.yearSelect[0].value, "Incorrect min date");
LiveUnit.Assert.areEqual("1500", selectControls.yearSelect[selectControls.yearSelect.length - 1].value, "Incorrect max date");
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, unhandledTestError);
}
else
complete();
};
testMaxAndMinInHijriCalender = function (complete) {
//BugID: 628192
if (isWinRTEnabled()) {
var calendarType = 'HijriCalendar';
createPickerWithAppend({
calendar: calendarType,
maxYear: 2200,
minYear: 623 //first year of HijriCalendar
}).then(function (picker) {
datePicker = picker;
var selectControls = getControls(picker);
LiveUnit.Assert.areEqual("1627", selectControls.yearSelect[selectControls.yearSelect.length - 1].value, "Incorrect max date");
LiveUnit.Assert.areEqual("1", selectControls.yearSelect[0].value, "Incorrect min date");
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, unhandledTestError);
}
else
complete();
};
testMaxAndMinYearInGregorianWithZeroMinYear = function (complete) {
//BugID: 566275
if (isWinRTEnabled()) {
createPickerWithAppend({
calendar: 'GregorianCalendar',
maxYear: 2000,
minYear: 0
}).then(function (picker) {
datePicker = picker;
var selectControls = getControls(picker);
LiveUnit.Assert.areEqual("1", selectControls.yearSelect[0].value, "Incorrect min date");
LiveUnit.Assert.areEqual("2000", selectControls.yearSelect[selectControls.yearSelect.length - 1].value, "Incorrect max date");
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, unhandledTestError);
}
else
complete();
};
testMaxAndMinYearInGregorianWithNegativeMinYear = function (complete) {
//BugID: 628192
if (isWinRTEnabled()) {
createPickerWithAppend({
calendar: 'GregorianCalendar',
maxYear: 2000,
minYear: -1
}).then(function (picker) {
datePicker = picker;
var selectControls = getControls(picker);
LiveUnit.Assert.areEqual("1", selectControls.yearSelect[0].value, "Incorrect min date");
LiveUnit.Assert.areEqual("2000", selectControls.yearSelect[selectControls.yearSelect.length - 1].value, "Incorrect max date");
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, unhandledTestError);
}
else
complete();
};
testTaiwanLeapMinYears = function (complete) {
//BugID: 628192
if (isWinRTEnabled()) {
var cleanup;
var calendarType = 'TaiwanCalendar';
createPickerWithAppend({
calendar: calendarType,
minYear: 1899,
maxYear: 2111,
current: new Date(1900, 0, 1)
}).then(function (picker) {
datePicker = picker;
var calendar = calendarType;
var selectControls = getControls(picker);
LiveUnit.Assert.areEqual(getExpectedOrder(calendar), getActualUIOrder(), "Incorrect UI order");
LiveUnit.Assert.areEqual("1", selectControls.yearSelect[0].value, "Incorrect min date");
LiveUnit.Assert.areEqual("200", selectControls.yearSelect[selectControls.yearSelect.length - 1].value, "Incorrect max date");
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, unhandledTestError);
}
else
complete();
};
testDateWithAbbreviatedWithSpecialCharacters = function (complete) {
if (isWinRTEnabled()) {
createPickerWithAppend({
calendar: 'GregorianCalendar',
current: 'April 05, 2011',
yearPattern: "year: ' & < > # {year.full}",
monthPattern: 'month: ! @ $ % ^ " {month.full}',
datePattern: "day ( ) - + = _ {dayofweek.solo.full}",
}).then(function (picker) {
verifyDatePickerContent(picker, { day: 'day ( ) - + = _ Tuesday', month: 'month: ! @ $ % ^ " April', year: "year: ' & < > # 2011" });
// change the day
picker.winControl.current = 'March 25, 2012';
verifyDatePickerContent(picker, { day: 'day ( ) - + = _ Sunday', month: 'month: ! @ $ % ^ " March', year: "year: ' & < > # 2012" });
})
.then(null, unhandledTestError)
.then(cleanupDatePicker)
.then(complete, complete);
}
else {
complete();
}
};
testConstructionWithEventHandlerInOptions = function (complete) {
var handler = function () {
complete();
};
var dp = new DatePicker(null, { onchange: handler });
document.body.appendChild(dp.element);
var evnt = <UIEvent>document.createEvent("UIEvents");
evnt.initUIEvent("change", false, false, window, 0);
dp.element.dispatchEvent(evnt);
};
};
}
LiveUnit.registerTestClass("CorsicaTests.DatePickerDecl");