
switched from itoken_parallel to itoken
BERENZ committed Nov 5, 2023
1 parent a0c561c commit 031c2d2
Showing 3 changed files with 4 additions and 4 deletions.
2 changes: 1 addition & 1 deletion .Rproj.user/E3DB6272/pcs/files-pane.pper
@@ -5,5 +5,5 @@
       "ascending": true
     }
   ],
-  "path": "~/git/nauka/ncn-foreigners/software/blocking/.github/workflows"
+  "path": "~/git/nauka/ncn-foreigners/software/blocking/inst/tinytest"
 }
2 changes: 1 addition & 1 deletion .Rproj.user/E3DB6272/pcs/source-pane.pper
@@ -1,4 +1,4 @@
 {
-  "activeTab": 3,
+  "activeTab": 0,
   "activeTabSourceWindow0": 0
 }
4 changes: 2 additions & 2 deletions R/blocking.R
@@ -108,7 +108,7 @@ blocking <- function(x,
   if (verbose %in% 1:2) cat("===== creating tokens =====\n")
 
   ## tokens for x
-  l_tokens <- text2vec::itoken_parallel(
+  l_tokens <- text2vec::itoken(
     iterable = x,
     tokenizer = function(x) tokenizers::tokenize_character_shingles(x, n = control_txt$n_shingles),
     n_chunks = control_txt$n_chunks,
@@ -122,7 +122,7 @@
   if (is.null(y_default)) {
     l_dtm_y <- l_dtm
   } else {
-    l_tokens_y <- text2vec::itoken_parallel(
+    l_tokens_y <- text2vec::itoken(
       iterable = y,
       tokenizer = function(x) tokenizers::tokenize_character_shingles(x, n = control_txt$n_shingles),
       n_chunks = control_txt$n_chunks,
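
Context for the change: text2vec::itoken() builds a sequential token iterator, whereas itoken_parallel() splits the input across parallel workers; both feed create_vocabulary() and create_dtm() in the same way. A minimal standalone sketch of the sequential pattern used above (the example strings, shingle size n = 2, and n_chunks = 1 are illustrative assumptions, not values from this package):

library(text2vec)
library(tokenizers)

x <- c("john smith", "jon smyth", "mary brown")

# sequential iterator over character 2-shingles (itoken instead of itoken_parallel)
it <- itoken(
  iterable = x,
  tokenizer = function(x) tokenize_character_shingles(x, n = 2),
  n_chunks = 1,
  progressbar = FALSE
)

vocab <- create_vocabulary(it)                    # vocabulary of observed shingles
dtm   <- create_dtm(it, vocab_vectorizer(vocab))  # sparse document-shingle matrix
dim(dtm)                                          # 3 documents x number of distinct shingles

The same iterator can be reused for both calls, so dropping the parallel variant only changes how chunks are processed, not the resulting document-term matrix.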
