-
Notifications
You must be signed in to change notification settings - Fork 33
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Implement Scala Matchbox UDFs in Python.
- Resolves #408 - Alphabetizes DataFrameLoader functions - Alphabetizes UDF functions - Move DataFrameLoader to df packages - Move UDFs out of df into their own package - Rename UDFs (no more DF tagged to the end). - Update tests as necessary - Partially addresses #410, #409 - Supersedes #412.
- Loading branch information
Showing
19 changed files
with
301 additions
and
188 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,4 +1,4 @@ | ||
from aut.common import WebArchive | ||
from aut.udfs import extract_domain | ||
from aut.udfs import Udf | ||
|
||
__all__ = ["WebArchive", "extract_domain"] | ||
__all__ = ["WebArchive", "Udf"] |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,13 +1,115 @@ | ||
from pyspark.sql.functions import udf | ||
from pyspark.sql.types import StringType | ||
from pyspark import SparkContext | ||
from pyspark.sql.column import Column, _to_java_column, _to_seq | ||
from pyspark.sql.functions import col | ||
|
||
|
||
def extract_domain_func(url):
    """Return the domain of *url*, with the scheme and "www." removed.

    Note: every occurrence of the substring "www." is dropped, not just a
    leading prefix — this mirrors the original implementation exactly.
    """
    # Strip scheme prefixes so the host is the leading path segment.
    stripped = url.replace("http://", "").replace("https://", "")
    # partition() yields the whole string as the head when "/" is absent,
    # which collapses the original if/else into a single expression.
    host = stripped.partition("/")[0]
    return host.replace("www.", "")
class Udf:
    """Python bindings for the Scala Matchbox UDFs in
    ``io.archivesunleashed.udfs.package``.

    Each static method looks up the named UDF on the JVM through the active
    SparkContext, applies it to the given column(s), and wraps the result as
    a PySpark ``Column``. Call them unbound, e.g. ``Udf.compute_md5(col)``.

    A live SparkContext (and the aut JAR on the JVM classpath) is required;
    none of these methods compute anything in Python.
    """

    @staticmethod
    def _apply(name, *cols):
        """Apply the JVM UDF *name* to *cols* and return a PySpark Column.

        Consolidates the lookup boilerplate previously duplicated in every
        method. ``SparkContext.getOrCreate()`` is a classmethod, so the
        original per-method ``sc.getOrCreate()`` chains were redundant.
        """
        sc = SparkContext.getOrCreate()
        jvm_udf = getattr(sc._jvm.io.archivesunleashed.udfs.package, name)().apply
        return Column(jvm_udf(_to_seq(sc, list(cols), _to_java_column)))

    @staticmethod
    def compute_image_size(col):
        """Wraps the JVM ``computeImageSize`` UDF."""
        return Udf._apply("computeImageSize", col)

    @staticmethod
    def compute_md5(col):
        """Wraps the JVM ``computeMD5`` UDF."""
        return Udf._apply("computeMD5", col)

    @staticmethod
    def compute_sha1(col):
        """Wraps the JVM ``computeSHA1`` UDF."""
        return Udf._apply("computeSHA1", col)

    @staticmethod
    def detect_language(col):
        """Wraps the JVM ``detectLanguage`` UDF."""
        return Udf._apply("detectLanguage", col)

    @staticmethod
    def detect_mime_type_tika(col):
        """Wraps the JVM ``detectMimeTypeTika`` UDF."""
        return Udf._apply("detectMimeTypeTika", col)

    @staticmethod
    def extract_boilerplate(col):
        """Wraps the JVM ``extractBoilerpipeText`` UDF."""
        return Udf._apply("extractBoilerpipeText", col)

    @staticmethod
    def extract_date(col, dates):
        """Wraps the JVM ``extractDate`` UDF.

        Bug fix: the original ignored *dates* and forwarded only *col*;
        both arguments are now passed through, matching the two-argument
        wrappers such as ``extract_links``.
        """
        return Udf._apply("extractDate", col, dates)

    @staticmethod
    def extract_domain(col):
        """Wraps the JVM ``extractDomain`` UDF."""
        return Udf._apply("extractDomain", col)

    @staticmethod
    def extract_image_links(col, image_links):
        """Wraps the JVM ``extractImageLinks`` UDF."""
        return Udf._apply("extractImageLinks", col, image_links)

    @staticmethod
    def extract_links(col, links):
        """Wraps the JVM ``extractLinks`` UDF."""
        return Udf._apply("extractLinks", col, links)

    @staticmethod
    def get_extension_mime(col, mime):
        """Wraps the JVM ``getExtensionMime`` UDF."""
        return Udf._apply("getExtensionMime", col, mime)

    @staticmethod
    def remove_http_header(col):
        """Wraps the JVM ``removeHTTPHeader`` UDF."""
        return Udf._apply("removeHTTPHeader", col)

    @staticmethod
    def remove_html(col):
        """Wraps the JVM ``removeHTML`` UDF."""
        return Udf._apply("removeHTML", col)

    @staticmethod
    def remove_prefix_www(col):
        """Wraps the JVM ``removePrefixWWW`` UDF."""
        return Udf._apply("removePrefixWWW", col)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.