Setup SQL
USE ROLE ACCOUNTADMIN;
USE DATABASE SNOWFLAKE_INTELLIGENCE;
USE SCHEMA AGENTS;
CREATE OR REPLACE NETWORK RULE euno_api_network_rule
  MODE = EGRESS
  TYPE = HOST_PORT
  VALUE_LIST = ('api.app.euno.ai');
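
-- Optional sanity check: confirm the egress rule exists (SHOW commands need no warehouse).
SHOW NETWORK RULES LIKE 'EUNO_API_NETWORK_RULE';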
CREATE OR REPLACE SECRET euno_api_key
  TYPE = GENERIC_STRING
  SECRET_STRING = '<YOUR_API_KEY>';

CREATE OR REPLACE SECRET euno_account_id
  TYPE = GENERIC_STRING
  SECRET_STRING = '<YOUR_ACCOUNT_ID>';
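
-- Optional sanity check: list the two secrets (secret values are never displayed).
SHOW SECRETS LIKE 'EUNO_%' IN SCHEMA SNOWFLAKE_INTELLIGENCE.AGENTS;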
CREATE OR REPLACE EXTERNAL ACCESS INTEGRATION euno_api_integration
  ALLOWED_NETWORK_RULES = (euno_api_network_rule)
  ALLOWED_AUTHENTICATION_SECRETS = (euno_api_key, euno_account_id)
  ENABLED = TRUE;
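
-- Optional sanity check: inspect the integration's allowed network rules, secrets, and enabled state.
DESCRIBE EXTERNAL ACCESS INTEGRATION euno_api_integration;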
USE ROLE SYSADMIN;
CREATE OR REPLACE FUNCTION euno_api_caller(path STRING, key STRING, value STRING)
  RETURNS STRING
  LANGUAGE JAVA
  RUNTIME_VERSION = '11'
  PACKAGES = ('com.snowflake:snowpark:latest')
  EXTERNAL_ACCESS_INTEGRATIONS = (euno_api_integration)
  SECRETS = ('api_key' = euno_api_key, 'account_id' = euno_account_id)
  HANDLER = 'SecureApiCaller.call'
AS
$$
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import com.snowflake.snowpark_java.types.SnowflakeSecrets;

public class SecureApiCaller {
    private static final HttpClient client = HttpClient.newHttpClient();
    private static final String URL = "https://api.app.euno.ai/";

    public static String call(String path, String key, String value) throws Exception {
        // Read the Euno credentials from the secrets bound to this UDF.
        SnowflakeSecrets secrets = SnowflakeSecrets.newInstance();
        String apiKey = secrets.getGenericSecretString("api_key");
        String accountId = secrets.getGenericSecretString("account_id");

        // Build the endpoint URL, passing the account ID and API key as query parameters.
        String fullUrl = URL
            .concat("mcp/endpoints/")
            .concat(path)
            .concat("?account_id=").concat(accountId)
            .concat("&api_key=").concat(apiKey);

        // Escape the value so it can be embedded safely in a JSON string literal.
        String safeValue = value
            .replace("\\", "\\\\")
            .replace("\"", "\\\"")
            .replace("\n", "\\n")
            .replace("\r", "\\r");
        String body = "{\"" + key + "\":\"" + safeValue + "\"}";

        HttpRequest req = HttpRequest.newBuilder()
            .uri(URI.create(fullUrl))
            .header("Accept", "application/json")
            .header("Content-Type", "application/json")
            .POST(HttpRequest.BodyPublishers.ofString(body))
            .build();
        HttpResponse<String> res = client.send(req, HttpResponse.BodyHandlers.ofString());

        int code = res.statusCode();
        if (code < 200 || code >= 300) {
            // Report the endpoint path rather than the full URL so the API key is not echoed back.
            return "HTTP " + code + " calling " + path + " with " + body + ": " + res.body();
        }
        return res.body();
    }
}
$$;
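
-- Optional smoke test for the caller UDF. A sketch only: it assumes the secrets above hold real
-- values (not the placeholders) and that a warehouse is active in the session; the question text
-- is illustrative.
-- SELECT euno_api_caller('euno_ask_data_pipeline', 'query', 'Which dashboards depend on the orders table?');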
CREATE OR REPLACE FUNCTION euno_impact_analysis(resource_identifier STRING)
  RETURNS STRING
AS
$$
  euno_api_caller('euno_impact_analysis', 'resource_identifier', resource_identifier)
$$;

CREATE OR REPLACE FUNCTION euno_sql_planner(query STRING)
  RETURNS STRING
AS
$$
  euno_api_caller('euno_sql_planner', 'query', query)
$$;

CREATE OR REPLACE FUNCTION euno_ask_data_pipeline(query STRING)
  RETURNS STRING
AS
$$
  euno_api_caller('euno_ask_data_pipeline', 'query', query)
$$;

CREATE OR REPLACE FUNCTION euno_search_data_pipeline_resources(search_query STRING)
  RETURNS STRING
AS
$$
  euno_api_caller('euno_search_data_pipeline_resources', 'search_query', search_query)
$$;
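
-- Optional usage examples for the wrapper functions. The argument strings are illustrative;
-- uncomment once the secrets hold real values and a warehouse is active.
-- SELECT euno_search_data_pipeline_resources('most viewed dashboards that use the orders table');
-- SELECT euno_sql_planner('monthly revenue per customer segment over the last 12 months');
-- SELECT euno_impact_analysis('the orders table in the analytics schema');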
CREATE OR REPLACE AGENT EUNO_AGENT
  COMMENT = 'Euno.ai Data Pipeline Assistant Agent'
  PROFILE = '{
    "display_name": "Euno.ai Agent",
    "avatar": "CirclesAgentIcon",
    "color": "purple"
  }'
FROM SPECIFICATION
$$
models:
  orchestration: auto
orchestration:
  budget:
    seconds: 300
    tokens: 16000
instructions:
  system: "You are a data agent that helps users understand and manage their data pipeline."
  orchestration: >
    You have at your disposal a set of tools from the Euno Assistant AI.
    Use them to get insights and information regarding the entire data pipeline.
    In Euno, all data pipeline entities (tables, dashboards, columns, etc.) are referred to as 'resources'.
    Each resource has a unique resource identifier that can be used to look it up in Euno.
  response: >
    Be concise and return Euno responses as accurately as possible.
tools:
  - tool_spec:
      type: "generic"
      name: "euno_search_data_pipeline_resources"
      description: "Search for resources in the data pipeline, including databases, tables, schemas, data sources, dashboards, or transformations. Fetches intelligently based on properties, relationships, usage, or other metadata."
      input_schema:
        type: "object"
        properties:
          search_query:
            type: "string"
            description: "Describe the resources you are looking for in the data pipeline. Include relationships if needed (e.g. 'dashboards that use table X'), ordering preferences (e.g. 'most viewed first'), and the specific information needed (logic, usage, owners, etc.)."
        required: ["search_query"]
  - tool_spec:
      type: "generic"
      name: "euno_ask_data_pipeline"
      description: "Ask any question about the data pipeline, existing resources, data sources, or transformations across all layers of the data stack. Provides comprehensive information about the entire data infrastructure."
      input_schema:
        type: "object"
        properties:
          query:
            type: "string"
            description: "The question or request about the data pipeline, resources, data sources, or transformations. Can include questions about specific databases, tables, schemas, data flow, or any aspect of the data stack."
        required: ["query"]
  - tool_spec:
      type: "generic"
      name: "euno_sql_planner"
      description: "Plan a SQL query based on the request. Analyzes the request, searches for relevant existing SQL logic, and provides a comprehensive plan for building the required query. Supports Snowflake, BigQuery, Databricks, and Redshift."
      input_schema:
        type: "object"
        properties:
          query:
            type: "string"
            description: "The SQL query request or data retrieval requirement. Describe what data you need, which tables or sources might be involved, and any specific filtering, aggregation, or join requirements."
        required: ["query"]
  - tool_spec:
      type: "generic"
      name: "euno_impact_analysis"
      description: "Generate an impact analysis report on any column or table resource. Returns a detailed report of downstream resources that might be affected if the column or table is renamed or modified."
      input_schema:
        type: "object"
        properties:
          resource_identifier:
            type: "string"
            description: "The full URI of the column or table resource to analyze, or a detailed and precise description of the resource (e.g. \"the 'users' table in the 'analytics' schema in the 'dwh-prod-security' BigQuery database\")."
        required: ["resource_identifier"]
tool_resources:
  euno_search_data_pipeline_resources:
    type: "function"
    identifier: "SNOWFLAKE_INTELLIGENCE.AGENTS.EUNO_SEARCH_DATA_PIPELINE_RESOURCES"
    execution_environment:
      type: "warehouse"
      warehouse: "CORE"
  euno_ask_data_pipeline:
    type: "function"
    identifier: "SNOWFLAKE_INTELLIGENCE.AGENTS.EUNO_ASK_DATA_PIPELINE"
    execution_environment:
      type: "warehouse"
      warehouse: "CORE"
  euno_sql_planner:
    type: "function"
    identifier: "SNOWFLAKE_INTELLIGENCE.AGENTS.EUNO_SQL_PLANNER"
    execution_environment:
      type: "warehouse"
      warehouse: "CORE"
  euno_impact_analysis:
    type: "function"
    identifier: "SNOWFLAKE_INTELLIGENCE.AGENTS.EUNO_IMPACT_ANALYSIS"
    execution_environment:
      type: "warehouse"
      warehouse: "CORE"
$$;
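
-- Optional check, assuming AGENT objects follow Snowflake's standard SHOW pattern
-- (confirm against the Snowflake documentation for your account before running):
-- SHOW AGENTS IN SCHEMA SNOWFLAKE_INTELLIGENCE.AGENTS;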
-- Create a role for users who will interact with the Euno agent
CREATE ROLE IF NOT EXISTS EUNO_AGENT_USER;
GRANT ROLE EUNO_AGENT_USER TO ROLE ACCOUNTADMIN;
GRANT DATABASE ROLE SNOWFLAKE.CORTEX_USER TO ROLE EUNO_AGENT_USER;
GRANT DATABASE ROLE SNOWFLAKE.CORTEX_USER TO ROLE ACCOUNTADMIN;
-- Grant permissions to the agent user role
GRANT USAGE ON DATABASE SNOWFLAKE_INTELLIGENCE TO ROLE EUNO_AGENT_USER;
GRANT USAGE ON SCHEMA SNOWFLAKE_INTELLIGENCE.AGENTS TO ROLE EUNO_AGENT_USER;
GRANT USAGE ON AGENT EUNO_AGENT TO ROLE EUNO_AGENT_USER;
GRANT USAGE ON FUNCTION euno_search_data_pipeline_resources(STRING) TO ROLE EUNO_AGENT_USER;
GRANT USAGE ON FUNCTION euno_ask_data_pipeline(STRING) TO ROLE EUNO_AGENT_USER;
GRANT USAGE ON FUNCTION euno_sql_planner(STRING) TO ROLE EUNO_AGENT_USER;
GRANT USAGE ON FUNCTION euno_impact_analysis(STRING) TO ROLE EUNO_AGENT_USER;
GRANT USAGE ON FUNCTION euno_api_caller(STRING, STRING, STRING) TO ROLE EUNO_AGENT_USER;
GRANT USAGE ON WAREHOUSE CORE TO ROLE EUNO_AGENT_USER;
-- Grant the agent user role to specific users (replace with actual usernames)
-- GRANT ROLE EUNO_AGENT_USER TO USER your_user_name_here;
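
-- Optional: review everything granted to the role.
SHOW GRANTS TO ROLE EUNO_AGENT_USER;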