Proyectos-Innovacion-2024 / CSS-Engine-Python-Cusca · Commits

Commit cb3ae104, authored Apr 29, 2024 by Erly Villaroel
Changes to handle an empty dataframe
Parent: ad60e8b9
Showing 1 changed file with 2 additions and 66 deletions: run.py (+2, -66)
run.py @ cb3ae104
from app import MainApplication
import warnings
from sqlalchemy.orm import sessionmaker, scoped_session
from pyspark.sql import SparkSession
from decimal import Decimal
from pyspark.sql.types import StructType, StructField, StringType, DecimalType, ArrayType

warnings.filterwarnings("ignore")

from sqlalchemy import create_engine
from app.main.engine.util.Utils import Utils

# Create the application instance used by Utils
base = MainApplication()
app = base.create_app()

#
# if __name__ == "__main__":
#     base.run(port=8000)

spark = SparkSession.builder \
    .appName("Crear DataFrame en PySpark") \
    .getOrCreate()

# Specify the DataFrame schema
schema = StructType([
    StructField("PIVOT_Fecha", StringType(), True),
    StructField("COUNTERPART_Fecha", StringType(), True),
    StructField("Cuenta", StringType(), True),
    StructField("Account", StringType(), True),
    StructField("DIFF", DecimalType(10, 2), True),
    StructField("LISTA_DIFF", ArrayType(StringType()), True),
    StructField("INTER_PIVOT_ID", StringType(), True),
    StructField("INTER_CTP_ID", StringType(), True),
    StructField("PIVOT_Valor", DecimalType(10, 2), True),
    StructField("COUNTERPART_Valor", DecimalType(10, 2), True)
])

# Create the DataFrame with sample data (an empty list here)
data = []
df = spark.createDataFrame(data, schema)

# Show the DataFrame
df.show()

# Descriptor for the match-and-exclude process
descriptor = {
    "idProcess": 500240,
    "idScript": "match-and-exclude",
    "config-params": {
        "max-records-per-combination": 10,
        "max-timeout-per-combination": 1000,
        "exclude-entity-pivot": True
    },
    "params-input": {
        "pivot-config": {
            "tablename": "PIVOT_TEMPORAL",
            "id-column": "ID",
            "amount-columns": "Valor",
            "columns-group": ["Fecha", "Cuenta"],
            "columns-transaction": ["Fecha", "Cuenta", "Valor"]
        },
        "counterpart-config": {
            "tablename": "PIVOT_TEMPORAL",
            "id-column": "ID",
            "amount-columns": "Valor",
            "columns-group": ["Fecha", "Account"],
            "columns-transaction": ["Fecha", "Account", "Valor"]
        }
    }
}

# Run the matching step on the (empty) DataFrame
a = Utils(app).create_result(df, descriptor)
print("diccion2rio", a)

# Persist the result through a SQLAlchemy session
engine = create_engine("mysql+pymysql://root:root@192.168.0.11:3301/cusca")
session_factory = sessionmaker(bind=engine)
session = session_factory()
b = Utils(app).save_result(a["detail"], descriptor, session)
print(b)

if __name__ == "__main__":
    base.run(port=8000)
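The commit message says these changes are meant to handle an empty dataframe, and the script above deliberately feeds an empty data list through the match-and-exclude flow. The actual handling lives inside the project's Utils class, which is not shown in this commit; the following is only a minimal, hypothetical sketch of the kind of guard such a flow needs, with an assumed wrapper name (create_result_safe) and an assumed fallback result shape:

# Hypothetical guard around the matching step; illustration only, not the project's implementation.
def create_result_safe(utils, df, descriptor):
    if df.rdd.isEmpty():
        # Skip the matching logic entirely and return an assumed empty result shape.
        # Only the "detail" key is grounded in run.py (it is read by save_result).
        return {"detail": []}
    return utils.create_result(df, descriptor)

result = create_result_safe(Utils(app), df, descriptor)
print(result)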
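For reference, the Decimal import in run.py only comes into play once the DataFrame carries rows, because the DecimalType(10, 2) columns in the schema expect decimal.Decimal values rather than floats. A small, hypothetical sample row matching that schema (all values invented for illustration) could be built like this:

from decimal import Decimal

# Hypothetical sample row matching the ten-column schema defined in run.py.
sample_row = (
    "2024-04-01",        # PIVOT_Fecha
    "2024-04-01",        # COUNTERPART_Fecha
    "1105",              # Cuenta
    "1105",              # Account
    Decimal("0.00"),     # DIFF
    ["0.00"],            # LISTA_DIFF
    "P-001",             # INTER_PIVOT_ID
    "C-001",             # INTER_CTP_ID
    Decimal("150.25"),   # PIVOT_Valor
    Decimal("150.25"),   # COUNTERPART_Valor
)

df_sample = spark.createDataFrame([sample_row], schema)
df_sample.show()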