Project: bcom-tp-etl-transformation-pipelines

Commit 941f6177
Authored Aug 07, 2023 by Cristian Aguirre
Parent: b1423b97

    Update 07-08-23. Update schedule from DAG's

Showing 3 changed files with 5 additions and 5 deletions (+5 / -5):

    dags/dag_inform_process.py          +2 -2
    dags/dag_reset_process.py           +1 -1
    dags/dag_transformacion_bcom.py     +2 -2
dags/dag_inform_process.py

...
@@ -20,7 +20,7 @@ logger = logging.getLogger()
 DAG_NAME = "INFORM_PROCESS"
 # Change this path if is deployed in prod or dev
-MAIN_PATH = "/root/airflow/dags/"
+MAIN_PATH = "/opt/airflow/dags/"
 DEFAULT_ARGS = {
     'owner': 'BCOM',
...
@@ -181,7 +181,7 @@ def set_dag():
     logger.info(f"CONFIGURACIÓN: {data}")
     conf = data["app"]
     with DAG(DAG_NAME, default_args=DEFAULT_ARGS, description="Proceso que informa del último proceso ejecutado",
-             schedule_interval=conf["inform_dag_schedule"], tags=["DAG BCOM - INFORM PROCESS"], catchup=True) as dag:
+             schedule_interval=conf["inform_dag_schedule"], tags=["DAG BCOM - INFORM PROCESS"], catchup=False) as dag:
         control_s3 = conf["control"]["s3_params"]
         timezone = conf["timezone"]
         control_extractor = PythonOperator(
...
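The recurring change in this commit is the same in all three DAG files: catchup is flipped from True to False, so Airflow no longer backfills a run for every schedule interval between the DAG's start_date and the current date, and only the latest interval is scheduled. The following is a minimal sketch of what the resulting DAG definition looks like after this commit; the imports, start_date, config values and the task callable are placeholders, and only DAG_NAME, MAIN_PATH, the Spanish description, the config keys, the tags and catchup=False come from the diff itself.

# Sketch only: trimmed-down version of the pattern shared by the three DAGs after
# this commit. start_date, the conf dict values, the task_id and the callable are
# invented placeholders, not part of the repository.
from datetime import datetime

from airflow import DAG
from airflow.operators.python import PythonOperator

DAG_NAME = "INFORM_PROCESS"
MAIN_PATH = "/opt/airflow/dags/"          # new value introduced by this commit
DEFAULT_ARGS = {
    'owner': 'BCOM',
    'start_date': datetime(2023, 8, 1),   # placeholder, not shown in the diff
}

# Placeholder for the configuration normally read from MAIN_PATH; the key names
# mirror the diff context, the values are invented examples.
conf = {"inform_dag_schedule": "@daily", "control": {"s3_params": {}}, "timezone": "America/Lima"}


def extract_control(**kwargs):
    # Stub for the real control-extraction callable, which is not part of the diff.
    pass


with DAG(DAG_NAME,
         default_args=DEFAULT_ARGS,
         description="Proceso que informa del último proceso ejecutado",
         schedule_interval=conf["inform_dag_schedule"],
         tags=["DAG BCOM - INFORM PROCESS"],
         catchup=False) as dag:           # was catchup=True before this commit
    control_s3 = conf["control"]["s3_params"]
    timezone = conf["timezone"]
    control_extractor = PythonOperator(
        task_id="control_extractor",      # hypothetical task_id
        python_callable=extract_control,
    )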
dags/dag_reset_process.py

...
@@ -92,7 +92,7 @@ def set_dag():
     logger.info(f"CONFIGURACIÓN: {data}")
     conf = data["app"]
     with DAG(DAG_NAME, default_args=DEFAULT_ARGS, description="Proceso que resetea el último proceso ejecutado",
-             schedule_interval=conf["reset_dag_schedule"], tags=["DAG BCOM - RESET PROCESS"], catchup=True) as dag:
+             schedule_interval=conf["reset_dag_schedule"], tags=["DAG BCOM - RESET PROCESS"], catchup=False) as dag:
         control_s3 = conf["control"]["s3_params"]
         timezone = conf["timezone"]
         control_extractor = PythonOperator(
...
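All three schedules are read from the same configuration structure (data["app"]) rather than hard-coded in the DAG files. The configuration file itself is not part of this commit, so the snippet below is only a hypothetical illustration of its shape; the key names are the ones referenced in the diff context, while every value is an invented example.

# Hypothetical shape of the loaded configuration. Only the key names come from the
# diff context ("schedule", "inform_dag_schedule", "reset_dag_schedule", "timezone",
# "control.s3_params", "scripts.s3_params"); all values are invented.
data = {
    "app": {
        "schedule": "0 2 * * *",              # dag_transformacion_bcom.py
        "inform_dag_schedule": "0 6 * * *",   # dag_inform_process.py
        "reset_dag_schedule": None,           # dag_reset_process.py (None = manual runs only)
        "timezone": "America/Lima",
        "control": {"s3_params": {"bucket": "example-bucket", "prefix": "control/"}},
        "scripts": {"s3_params": {"bucket": "example-bucket", "prefix": "scripts/"}},
    }
}

conf = data["app"]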
dags/dag_transformacion_bcom.py

...
@@ -25,7 +25,7 @@ logger = logging.getLogger()
 DAG_NAME = "BCOM_DAG_EXTRACT_AND_TRANSFORM"
 # Change this path if is deployed in prod or dev
-MAIN_PATH = "/root/airflow/dags/"
+MAIN_PATH = "/opt/airflow/dags/"
 DEFAULT_ARGS = {
     'owner': 'BCOM',
...
@@ -143,7 +143,7 @@ def set_dag():
     logger.info(f"CONFIGURACIÓN: {data}")
     conf = data["app"]
     with DAG(DAG_NAME, default_args=DEFAULT_ARGS, description="Proceso que extrae y transforma",
-             schedule_interval=conf["schedule"], tags=["DAG BCOM - SQL TRANSFORMATIONS"], catchup=True) as dag:
+             schedule_interval=conf["schedule"], tags=["DAG BCOM - SQL TRANSFORMATIONS"], catchup=False) as dag:
         scripts_s3 = conf["scripts"]["s3_params"]
         if scripts_s3["prefix"].endswith("/"):
             wildcard_scripts = scripts_s3["prefix"] + "?*"
...
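The context lines of the last hunk also show how the scripts prefix is turned into a wildcard pattern when it ends with "/". A short, self-contained illustration of that expression follows; the prefix value is an invented example, and the comment on the resulting pattern assumes the usual "?"/"*" wildcard semantics.

# Illustration of the wildcard construction visible in the diff context; the prefix
# value is an invented example.
scripts_s3 = {"prefix": "scripts/"}

if scripts_s3["prefix"].endswith("/"):
    wildcard_scripts = scripts_s3["prefix"] + "?*"

print(wildcard_scripts)  # -> "scripts/?*", presumably requiring at least one character after the prefix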