📖 How to Use This Cheat Sheet
🗄 Database
CREATE ALTER DROP DESCRIBE SHOW ❄️

☁️ create or replace an existing database

CREATE [ OR REPLACE ] DATABASE [ IF NOT EXISTS ] <database_name>

🚉 Stage
CREATE ALTER DROP DESCRIBE SHOW LIST ❄️

☁️ create or replace an internal stage

CREATE [ OR REPLACE ] [ TEMPORARY ] STAGE [ IF NOT EXISTS ] <internal_stage_name>
    [ DIRECTORY = ( ENABLE = { TRUE | FALSE }
                    [ REFRESH_ON_CREATE = { TRUE | FALSE } ] ) ]
    [ FILE_FORMAT = ( TYPE = { CSV | JSON | AVRO | ORC | PARQUET | XML } ) ]
    [ COPY_OPTIONS = ( ON_ERROR = { CONTINUE | SKIP_FILE | ABORT_STATEMENT } ) ]
    [ COMMENT = '<string_literal>' ]

Amazon S3 | Microsoft Azure | Google Cloud Storage

☁️ create or replace an external stage for Amazon S3

CREATE [ OR REPLACE ] [ TEMPORARY ] STAGE [ IF NOT EXISTS ] <external_stage_name>
    URL = 's3://<bucket>[/<path>/]'
    [ STORAGE_INTEGRATION = <integration_name> ]
    [ DIRECTORY = ( ENABLE = { TRUE | FALSE }
                    [ REFRESH_ON_CREATE = { TRUE | FALSE } ]
                    [ AUTO_REFRESH = { TRUE | FALSE } ] ) ]
    [ FILE_FORMAT = ( TYPE = { CSV | JSON | AVRO | ORC | PARQUET | XML } ) ]
    [ COPY_OPTIONS = ( ON_ERROR = { CONTINUE | SKIP_FILE | ABORT_STATEMENT } ) ]
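☁️ worked example (not from the original sheet; analytics_db and csv_stage are placeholder names):

CREATE OR REPLACE DATABASE analytics_db;

-- internal stage that expects CSV files and skips files that fail to load
CREATE OR REPLACE STAGE csv_stage
    FILE_FORMAT = ( TYPE = CSV )
    COPY_OPTIONS = ( ON_ERROR = SKIP_FILE )
    COMMENT = 'internal stage for raw CSV files';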
🗃 Schema
CREATE ALTER DROP DESCRIBE SHOW ❄️

☁️ create or replace an existing schema

CREATE [ OR REPLACE ] [ TRANSIENT ] SCHEMA [ IF NOT EXISTS ] <schema_name>
    [ CLONE <source_schema>
        [ { AT | BEFORE } ( { TIMESTAMP => <timestamp> | OFFSET => <time_difference> } ) ] ]
    [ COMMENT = '<string_literal>' ]
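☁️ worked example (not from the original sheet; the schema names are placeholders):

CREATE OR REPLACE SCHEMA analytics_db.raw;

-- zero-copy clone of the schema as it looked one hour ago (OFFSET is in seconds)
CREATE OR REPLACE SCHEMA analytics_db.raw_backup
    CLONE analytics_db.raw
    AT ( OFFSET => -3600 );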
📊 Table
CREATE ALTER DROP DESCRIBE SHOW TRUNCATE ❄️

☁️ create or replace an existing table

CREATE [ OR REPLACE ] [ { TEMPORARY | TRANSIENT } ] TABLE [ IF NOT EXISTS ] <table_name>
    ( <col_name> <col_type> [ , <col_name> <col_type> , ... ] )
    [ CLUSTER BY ( <expr> [ , <expr> , ... ] ) ]
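☁️ worked example (not from the original sheet; table and column names are placeholders):

CREATE OR REPLACE TRANSIENT TABLE trips (
    trip_id    INTEGER,
    started_at TIMESTAMP_NTZ,
    fare_usd   NUMBER(10,2)
)
CLUSTER BY ( TO_DATE(started_at) );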
🚚 Loading Data
COPY INTO (standard) ❄️
COPY INTO (with transformation) ❄️

☁️ copy data from an internal or external stage

COPY INTO [<namespace>.]<table_name>
    FROM { @[<namespace>.]<stage_name>[/<path>]
         | @[<namespace>.]%<table_name>[/<path>]
         | @~[/<path>] }
    [ FILES = ( '<file_name>' [ , '<file_name>' , ... ] ) ]
    [ PATTERN = '<regex_pattern>' ]
    [ FILE_FORMAT = ( TYPE = { CSV | JSON | AVRO | ORC | PARQUET | XML } ) ]
    [ ON_ERROR = { CONTINUE | SKIP_FILE | ABORT_STATEMENT } ]
    [ FORCE = { TRUE | FALSE } ]
    [ VALIDATION_MODE = RETURN_<n>_ROWS | RETURN_ERRORS | RETURN_ALL_ERRORS ]

Amazon S3 | Microsoft Azure | Google Cloud Storage

☁️ copy data from Amazon S3 directly

COPY INTO [<namespace>.]<table_name>
    FROM 's3://<bucket>[/<path>]'
    [ { STORAGE_INTEGRATION = <integration_name> }
      | { CREDENTIALS = ( AWS_KEY_ID = '<string>' AWS_SECRET_KEY = '<string>' ) } ]
    [ ENCRYPTION = ( [ TYPE = 'AWS_CSE' ] [ MASTER_KEY = '<string>' ] |
                     [ TYPE = 'AWS_SSE_S3' ] |
                     [ TYPE = 'AWS_SSE_KMS' [ KMS_KEY_ID = '<string>' ] ] |
                     [ TYPE = 'NONE' ] ) ]
    [ FILES = ( '<file_name>' [ , '<file_name>' , ... ] ) ]
    [ PATTERN = '<regex_pattern>' ]
    [ FILE_FORMAT = ( TYPE = { CSV | JSON | AVRO | ORC | PARQUET | XML } ) ]
    [ ON_ERROR = { CONTINUE | SKIP_FILE | ABORT_STATEMENT } ]
    [ FORCE = { TRUE | FALSE } ]
    [ VALIDATION_MODE = RETURN_<n>_ROWS | RETURN_ERRORS | RETURN_ALL_ERRORS ]
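☁️ worked example, loading the placeholder trips table from the placeholder csv_stage defined above (not from the original sheet):

COPY INTO trips
    FROM @csv_stage/2024/
    PATTERN = '.*[.]csv'
    FILE_FORMAT = ( TYPE = CSV )
    ON_ERROR = SKIP_FILE;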
🔎 View
CREATE ALTER DROP DESCRIBE SHOW ❄️

☁️ create or replace an existing view

CREATE [ OR REPLACE ] [ TEMPORARY ] VIEW [ IF NOT EXISTS ] <view_name>
    [ ( <column_list> ) ]
    AS <select_statement>

📸 Materialized View
CREATE ALTER DROP DESCRIBE SHOW ❄️

☁️ create or replace an existing materialized view

CREATE [ OR REPLACE ] [ SECURE ] MATERIALIZED VIEW [ IF NOT EXISTS ] <view_name>
    [ ( <column_list> ) ]
    [ CLUSTER BY ( <expr1> [ , <expr2> , ... ] ) ]
    AS <select_statement>
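☁️ worked example built on the placeholder trips table (not from the original sheet):

CREATE OR REPLACE VIEW daily_fares AS
    SELECT TO_DATE(started_at) AS ride_date,
           SUM(fare_usd)       AS total_fares
    FROM trips
    GROUP BY ride_date;

-- materialized views precompute and store their results; keep the query simple
CREATE OR REPLACE MATERIALIZED VIEW expensive_trips
    CLUSTER BY ( started_at ) AS
    SELECT trip_id, started_at, fare_usd
    FROM trips
    WHERE fare_usd > 100;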
🔄 Dynamic Table
CREATE ALTER DROP DESCRIBE SHOW ❄️

☁️ create or replace an existing dynamic table

CREATE [ OR REPLACE ] DYNAMIC TABLE <dyn_table_name>
    TARGET_LAG = { '<num> { seconds | minutes | hours | days }' | DOWNSTREAM }
    WAREHOUSE = <warehouse_name>
    AS <query>
    [ COMMENT = '<string_literal>' ]
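☁️ worked example (not from the original sheet; the warehouse name transform_wh is a placeholder):

CREATE OR REPLACE DYNAMIC TABLE daily_fares_dt
    TARGET_LAG = '15 minutes'
    WAREHOUSE = transform_wh
    AS
        SELECT TO_DATE(started_at) AS ride_date,
               SUM(fare_usd)       AS total_fares
        FROM trips
        GROUP BY ride_date;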
🌀 Data Manipulation
INSERT UPDATE MERGE DELETE ❄️

☁️ insert or replace data into a table from explicit values

INSERT [ OVERWRITE ] INTO <target_table> [ ( <target_col_name> [ , ... ] ) ]
    VALUES ( { <value> | DEFAULT | NULL } [ , ... ] )

☁️ insert or replace data into a table from a select query

INSERT [ OVERWRITE ] INTO <target_table> [ ( <target_col_name> [ , ... ] ) ]
    <query>
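☁️ worked example (not from the original sheet; trips_archive is a placeholder table):

INSERT INTO trips ( trip_id, started_at, fare_usd )
    VALUES ( 1, '2024-05-01 08:30:00', 12.50 ),
           ( 2, '2024-05-01 09:10:00', DEFAULT );

-- OVERWRITE truncates the target before inserting the query results
INSERT OVERWRITE INTO trips_archive
    SELECT * FROM trips WHERE started_at < '2024-01-01';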
📋 Task
CREATE ALTER DROP DESCRIBE SHOW ❄️

☁️ create or replace an existing task

CREATE [ OR REPLACE ] TASK [ IF NOT EXISTS ] <task_name>
    [ WAREHOUSE = <string> ]
    [ SCHEDULE = '{ <num> MINUTE | USING CRON <expr> <time_zone> }' ]
    [ ALLOW_OVERLAPPING_EXECUTION = TRUE | FALSE ]
    [ <session_parameter> = <value> [ , <session_parameter> = <value> , ... ] ]
    [ USER_TASK_TIMEOUT_MS = <num> ]
    [ SUSPEND_TASK_AFTER_NUM_FAILURES = <num> ]
    [ COMMENT = '<string_literal>' ]
    [ AFTER <string> [ , <string> , ... ] ]
    [ WHEN <boolean_expr> ]
    AS { <sql> | <stored_procedure> }

CRON Expressions

🪄 Function (UDF)
CREATE ALTER DROP DESCRIBE SHOW ❄️

Python | SQL | JavaScript | Scala

☁️ create or replace an existing Python UDF (user-defined function)

CREATE [ OR REPLACE ] [ TEMPORARY ] FUNCTION <function_name> ( [ <arg_name> <arg_data_type> ] [ , ... ] )
    RETURNS { <result_data_type> | TABLE ( <col_name> <col_data_type> [ , ... ] ) }
    [ [ NOT ] NULL ]
    LANGUAGE PYTHON
    [ { CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT } ]
    RUNTIME_VERSION = <python_version>
    [ COMMENT = '<string_literal>' ]
    [ IMPORTS = ( '<stage_path_and_file_name_to_read>' [ , ... ] ) ]
    [ PACKAGES = ( '<package_name>[==<version>]' [ , ... ] ) ]
    HANDLER = '<function_name>'
    AS '<function_definition>'
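☁️ worked example (not from the original sheet; all object names are placeholders, and the warehouse transform_wh is assumed to exist):

-- scalar Python UDF
CREATE OR REPLACE FUNCTION fare_with_tip(fare FLOAT, tip_pct FLOAT)
    RETURNS FLOAT
    LANGUAGE PYTHON
    RUNTIME_VERSION = '3.10'
    HANDLER = 'add_tip'
AS $$
def add_tip(fare, tip_pct):
    return fare * (1 + tip_pct / 100)
$$;

-- nightly task; a new task stays suspended until you run ALTER TASK ... RESUME
CREATE OR REPLACE TASK purge_old_trips
    WAREHOUSE = transform_wh
    SCHEDULE = 'USING CRON 0 2 * * * UTC'
AS
    DELETE FROM trips WHERE started_at < DATEADD('year', -1, CURRENT_TIMESTAMP());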
🌊 Stream
CREATE ALTER DROP DESCRIBE SHOW ❄️

☁️ create or replace an existing stream on a table

CREATE [ OR REPLACE ] STREAM [ IF NOT EXISTS ] <stream_name>
    ON TABLE <table_name>
    [ APPEND_ONLY = TRUE | FALSE ]
    [ SHOW_INITIAL_ROWS = TRUE | FALSE ]
    [ COMMENT = '<string_literal>' ]

☁️ create or replace an existing stream on a directory table

CREATE [ OR REPLACE ] STREAM [ IF NOT EXISTS ] <stream_name>
    ON STAGE <stage_name>
    [ COMMENT = '<string_literal>' ]

☁️ create or replace an existing stream on a view

CREATE [ OR REPLACE ] STREAM [ IF NOT EXISTS ] <stream_name>
    ON VIEW <view_name>
    [ APPEND_ONLY = TRUE | FALSE ]
    [ SHOW_INITIAL_ROWS = TRUE | FALSE ]
    [ COMMENT = '<string_literal>' ]

🪜 Procedure
CREATE ALTER DROP DESCRIBE SHOW ❄️

Python | SQL | JavaScript | Scala
in-line | on stage

☁️ create or replace an existing in-line Python stored procedure

CREATE [ OR REPLACE ] PROCEDURE <procedure_name> ( [ <arg_name> <arg_data_type> ] [ , ... ] )
    RETURNS { <result_data_type> [ [ NOT ] NULL ] | TABLE ( <col_name> <col_data_type> [ , ... ] ) }
    LANGUAGE PYTHON
    RUNTIME_VERSION = '<python_version>'
    PACKAGES = ( 'snowflake-snowpark-python[==<version>]' [ , '<package_name>[==<version>]' , ... ] )
    [ IMPORTS = ( '<stage_path_and_file_name_to_read>' [ , ... ] ) ]
    HANDLER = '<function_name>'
    [ COMMENT = '<string_literal>' ]
    AS '<procedure_definition>'
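☁️ worked example (not from the original sheet; stream and procedure names are placeholders):

-- capture inserts into the placeholder trips table
CREATE OR REPLACE STREAM trips_changes
    ON TABLE trips
    APPEND_ONLY = TRUE;

-- minimal in-line Python stored procedure; the Snowpark session is passed as the first argument
CREATE OR REPLACE PROCEDURE count_trips()
    RETURNS INTEGER
    LANGUAGE PYTHON
    RUNTIME_VERSION = '3.10'
    PACKAGES = ( 'snowflake-snowpark-python' )
    HANDLER = 'run'
AS $$
def run(session):
    return session.table("TRIPS").count()
$$;

CALL count_trips();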
🚰 Pipe
CREATE ALTER DROP DESCRIBE SHOW PIPE_STATUS

Amazon S3 | Microsoft Azure | Google Cloud Storage

☁️ create or replace a pipe from Amazon S3

CREATE [ OR REPLACE ] PIPE [ IF NOT EXISTS ] <pipe_name>
    [ AUTO_INGEST = [ TRUE | FALSE ] ]
    [ ERROR_INTEGRATION = <integration_name> ]
    [ AWS_SNS_TOPIC = '<string>' ]
    [ COMMENT = '<string_literal>' ]
    AS <copy_statement>
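☁️ worked example (not from the original sheet; s3_trips_stage is a placeholder external stage, and AUTO_INGEST additionally requires S3 event notifications to be configured):

CREATE OR REPLACE PIPE trips_pipe
    AUTO_INGEST = TRUE
AS
    COPY INTO trips
    FROM @s3_trips_stage
    FILE_FORMAT = ( TYPE = CSV );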
🚨 Alert
CREATE ALTER DROP DESCRIBE SHOW HISTORY ❄️

☁️ create or replace an existing alert

CREATE [ OR REPLACE ] ALERT [ IF NOT EXISTS ] <alert_name>
    WAREHOUSE = <warehouse_name>
    SCHEDULE = '{ <num> MINUTE | USING CRON <expr> <time_zone> }'
    COMMENT = '<string_literal>'
    IF( EXISTS(
        <condition>
    ))
    THEN
        <action>

CRON Expressions
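☁️ worked example (not from the original sheet; alert_log is a placeholder table, and a new alert stays suspended until you run ALTER ALERT ... RESUME):

CREATE OR REPLACE ALERT negative_fare_alert
    WAREHOUSE = transform_wh
    SCHEDULE = '60 MINUTE'
    IF( EXISTS(
        SELECT 1 FROM trips WHERE fare_usd < 0
    ))
    THEN
        INSERT INTO alert_log
            SELECT CURRENT_TIMESTAMP(), 'negative fare detected in trips';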