1
- module Morphir.Snowpark.MapFunctionsMapping exposing (..)
1
+ module Morphir.Snowpark.MapFunctionsMapping exposing (mapFunctionsMapping )
2
+
3
+ import Dict as Dict
2
4
import Morphir.Scala.AST as Scala
3
5
import Morphir.IR.Value as Value exposing (Pattern (..) , Value (..) )
4
6
import Morphir.IR.Type exposing (Type )
5
- import Morphir.Snowpark.MappingContext exposing (ValueMappingContext )
7
+ import Morphir.Snowpark.MappingContext exposing (ValueMappingContext , isCandidateForDataFrame )
8
+ import Morphir.IR.Value exposing (valueAttribute )
9
+ import Morphir.IR.Type as Type
10
+ import Morphir.Snowpark.MappingContext exposing (isAnonymousRecordWithSimpleTypes )
11
+ import Morphir.IR.Name as Name
12
+ import Morphir.Snowpark.Constants exposing (applySnowparkFunc )
13
+ import Morphir.Snowpark.MappingContext exposing (isBasicType )
6
14
15
{-| Signature of the generic value-mapping callback threaded through this module:
given a Morphir IR value and the current mapping context, produce the
corresponding Snowpark Scala AST value.
-}
type alias MapValueType ta =
    Value ta (Type ()) -> ValueMappingContext -> Scala.Value
7
16
8
- mapFunctionsMapping : Value ta (Type () ) -> ( Value ta ( Type () ) -> ValueMappingContext -> Scala . Value ) -> ValueMappingContext -> Scala .Value
17
+ mapFunctionsMapping : Value ta (Type () ) -> MapValueType ta -> ValueMappingContext -> Scala .Value
9
18
mapFunctionsMapping value mapValue ctx =
10
19
case value of
11
20
Value . Apply _ ( Value . Apply _ ( Value . Reference _ ( [ [ " morphir" ], [ " s" , " d" , " k" ] ], [ [ " list" ] ], [ " member" ] )) predicate) sourceRelation ->
@@ -14,5 +23,95 @@ mapFunctionsMapping value mapValue ctx =
14
23
applySequence = mapValue sourceRelation ctx
15
24
in
16
25
Scala . Apply ( Scala . Select variable " in" ) [ Scala . ArgValue Nothing applySequence ]
26
+ Value . Apply _ ( Value . Apply _ ( Value . Reference _ ( [ [ " morphir" ], [ " s" , " d" , " k" ] ], [ [ " list" ] ], [ " map" ] )) projection) sourceRelation ->
27
+ generateForListMap projection sourceRelation ctx mapValue
28
+ Value . Apply _ ( Value . Apply _ ( Value . Reference _ ( [ [ " morphir" ], [ " s" , " d" , " k" ] ], [ [ " list" ] ], [ " filter" ] )) predicate) sourceRelation ->
29
+ generateForListFilter predicate sourceRelation ctx mapValue
30
+ Value . Apply _ ( Value . Apply _ ( Value . Reference _ ( [ [ " morphir" ], [ " s" , " d" , " k" ] ], [ [ " list" ] ], [ " filter" , " map" ] )) predicateAction) sourceRelation ->
31
+ generateForListFilterMap predicateAction sourceRelation ctx mapValue
32
+ Value . Apply _ ( Value . Reference _ ( [ [ " morphir" ], [ " s" , " d" , " k" ] ], [ [ " list" ] ], [ " sum" ] )) collection ->
33
+ generateForListSum collection ctx mapValue
34
+ _ ->
35
+ Scala . Literal ( Scala . StringLit " To Do" )
36
+
37
+
38
{-| Generate Snowpark code for `List.sum`.

Only the pattern `List.sum (List.map projection df)` over a value that
`isCandidateForDataFrame` recognizes is supported. The already-mapped
`select(...)` call produced for the inner `List.map` is rewritten so that the
projected column is aliased as "result", and a `select(sum(col("result")))`
is appended on top. Every other shape falls through to a placeholder literal.
-}
generateForListSum : Value ta (Type ()) -> ValueMappingContext -> MapValueType ta -> Scala.Value
generateForListSum collection ctx mapValue =
    case collection of
        -- Only sums over a `List.map` projection of a DataFrame-like source are handled.
        Value.Apply _ (Value.Apply _ (Value.Reference _ ( [ [ "morphir" ], [ "s", "d", "k" ] ], [ [ "list" ] ], [ "map" ] )) _) sourceRelation ->
            if isCandidateForDataFrame (valueAttribute sourceRelation) ctx.typesContextInfo then
                -- Map the whole `List.map` expression first, then rewrite the
                -- resulting `select` call; we expect exactly one projected column.
                case mapValue collection ctx of
                    Scala.Apply col [ Scala.ArgValue argName projectedExpr ] ->
                        let
                            -- Alias under which the projected column is exposed.
                            resultName = Scala.Literal (Scala.StringLit "result")
                            -- `<projectedExpr>.as("result")`
                            asCall = Scala.Apply (Scala.Select projectedExpr "as") [ Scala.ArgValue Nothing resultName ]
                            -- Original select call with the aliased column spliced back in.
                            newSelect = Scala.Apply col [ Scala.ArgValue argName asCall ]
                            -- `sum(col("result"))`
                            sumCall = applySnowparkFunc "sum" [ applySnowparkFunc "col" [ resultName ] ]
                        in
                        -- `<newSelect>.select(sum(col("result")))`
                        Scala.Apply (Scala.Select newSelect "select") [ Scala.ArgValue Nothing sumCall ]
                    _ ->
                        Scala.Literal (Scala.StringLit "Unsupported sum scenario")
            else
                Scala.Literal (Scala.StringLit "Unsupported sum scenario")
        _ ->
            Scala.Literal (Scala.StringLit "Unsupported sum scenario")
58
+
59
{-| Generate Snowpark code for `List.filter` over a DataFrame-like source.

A lambda predicate becomes `<source>.filter(<mappedBody>)`; any other
predicate shape, or a non-DataFrame source, yields a placeholder literal.
-}
generateForListFilter : Value ta (Type ()) -> (Value ta (Type ())) -> ValueMappingContext -> MapValueType ta -> Scala.Value
generateForListFilter predicate sourceRelation ctx mapValue =
    case ( isCandidateForDataFrame (valueAttribute sourceRelation) ctx.typesContextInfo, predicate ) of
        -- DataFrame source with a lambda predicate: map both sides and chain `.filter(...)`.
        ( True, Value.Lambda _ _ lambdaBody ) ->
            Scala.Apply
                (Scala.Select (mapValue sourceRelation ctx) "filter")
                [ Scala.ArgValue Nothing (mapValue lambdaBody ctx) ]

        -- DataFrame source but an unrecognized predicate shape.
        ( True, _ ) ->
            Scala.Literal (Scala.StringLit "To Do")

        -- Source is not a DataFrame candidate.
        ( False, _ ) ->
            Scala.Literal (Scala.StringLit "Unsupported filter scenario")
69
+
70
+
71
{-| Generate Snowpark code for `List.filterMap` over a DataFrame-like source.

A lambda is translated to
`<source>.select(<mappedBody>.as("result")).filter(col("result").is_not_null)`:
the projection runs first, its (Maybe-producing) result is aliased as
"result", and rows whose result is NULL are filtered out. Any other
shape yields a placeholder literal.
-}
generateForListFilterMap : Value ta (Type ()) -> (Value ta (Type ())) -> ValueMappingContext -> MapValueType ta -> Scala.Value
generateForListFilterMap predicate sourceRelation ctx mapValue =
    if isCandidateForDataFrame (valueAttribute sourceRelation) ctx.typesContextInfo then
        case predicate of
            Value.Lambda _ _ binExpr ->
                let
                    -- `<source>.select(<mappedLambdaBody>)`
                    selectCall = Scala.Apply (Scala.Select (mapValue sourceRelation ctx) "select") [ Scala.ArgValue Nothing <| mapValue binExpr ctx ]
                    resultId = Scala.Literal <| Scala.StringLit "result"
                    -- FIX: method name was "as " (trailing space), which would emit a
                    -- broken Scala identifier; every other alias call in this module
                    -- uses "as".
                    selectColumnAlias = Scala.Apply (Scala.Select selectCall "as") [ Scala.ArgValue Nothing resultId ]
                    -- `col("result").is_not_null` — parameterless Column method, no parens.
                    isNotNullCall = Scala.Select (applySnowparkFunc "col" [ resultId ]) "is_not_null"
                in
                Scala.Apply (Scala.Select selectColumnAlias "filter") [ Scala.ArgValue Nothing isNotNullCall ]
            _ ->
                Scala.Literal (Scala.StringLit "Unsupported filterMap scenario")
    else
        Scala.Literal (Scala.StringLit "Unsupported filterMap scenario")
87
+
88
{-| Generate Snowpark code for `List.map` over a DataFrame-like source.

The lambda projection is decomposed by `processLambdaWithRecordBody` into
`select` arguments; when that succeeds the result is
`<source>.select(<args...>)`, otherwise (or for non-DataFrame sources)
a placeholder literal is produced.
-}
generateForListMap : Value ta (Type ()) -> (Value ta (Type ())) -> ValueMappingContext -> MapValueType ta -> Scala.Value
generateForListMap projection sourceRelation ctx mapValue =
    let
        fallback =
            Scala.Literal (Scala.StringLit "Unsupported map scenario")

        -- Wrap the mapped source relation in a `.select(...)` call.
        buildSelect selectArgs =
            Scala.Apply (Scala.Select (mapValue sourceRelation ctx) "select") selectArgs
    in
    if isCandidateForDataFrame (valueAttribute sourceRelation) ctx.typesContextInfo then
        processLambdaWithRecordBody projection ctx mapValue
            |> Maybe.map buildSelect
            |> Maybe.withDefault fallback
    else
        fallback
98
+
99
{-| Turn a projection lambda into the argument list for a Snowpark `select`.

Two lambda shapes are accepted, both binding their argument with an
as-pattern:

  - a record body over an anonymous record of simple types: each field
    becomes `<mappedField>.as("<camelCaseFieldName>")`;
  - any other body of a basic type: a single argument with the mapped body.

Everything else yields `Nothing`.
-}
processLambdaWithRecordBody : Value ta (Type ()) -> ValueMappingContext -> MapValueType ta -> Maybe (List Scala.ArgValue)
processLambdaWithRecordBody functionExpr ctx mapValue =
    let
        -- One record field -> `<mappedValue>.as("<camelCaseName>")` select argument.
        fieldToAliasedColumn ( fieldName, fieldValue ) =
            Scala.ArgValue Nothing
                (Scala.Apply
                    (Scala.Select (mapValue fieldValue ctx) "as")
                    [ Scala.ArgValue Nothing (Scala.Literal (Scala.StringLit (Name.toCamelCase fieldName))) ])
    in
    case functionExpr of
        Value.Lambda (Type.Function _ _ returnType) (Value.AsPattern _ _ _) (Value.Record _ fields) ->
            if isAnonymousRecordWithSimpleTypes returnType ctx.typesContextInfo then
                Just (List.map fieldToAliasedColumn (Dict.toList fields))
            else
                Nothing

        Value.Lambda (Type.Function _ _ returnType) (Value.AsPattern _ _ _) lambdaBody ->
            if isBasicType returnType then
                Just [ Scala.ArgValue Nothing (mapValue lambdaBody ctx) ]
            else
                Nothing

        _ ->
            Nothing
0 commit comments