Mirror of https://github.com/fastai/fastbook.git (synced 2025-04-05 18:30:44 +00:00)

Merge d27b447baf into 2c5db23a34
This commit is contained in: commit 2121715786
@@ -922,7 +922,7 @@
 ],
 "source": [
 "x,y = to_cpu(dls.train.one_batch())\n",
-"activs = learn.model(x)\n",
+"activs = TensorBase(learn.model(x))\n",
 "activs.shape"
 ]
 },
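Note on this hunk: the change casts the raw model output to fastai's TensorBase, presumably to avoid the type-dispatch errors that recent fastai/PyTorch versions raise when plain tensors are mixed with fastai tensor subclasses. A minimal sketch of the cell after the change, assuming dls and learn were built in earlier notebook cells:

    from fastai.torch_core import TensorBase, to_cpu

    # Pull one training batch onto the CPU and run the model on it.
    x, y = to_cpu(dls.train.one_batch())
    # Cast to TensorBase so later operations mixing fastai tensor subclasses dispatch cleanly.
    activs = TensorBase(learn.model(x))
    activs.shape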
|
@@ -7,7 +7,8 @@
 "outputs": [],
 "source": [
 "#hide\n",
-"! [ -e /content ] && pip install -Uqq fastbook kaggle waterfallcharts treeinterpreter dtreeviz\n",
+"! [ -e /content ] && pip install -Uqq fastbook kaggle waterfallcharts dtreeviz\n",
+"!pip install treeinterpreter\n",
 "import fastbook\n",
 "fastbook.setup_book()"
 ]
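Note on this hunk: the shell test [ -e /content ] only succeeds on Google Colab, so packages on that line are skipped when the notebook runs anywhere else; treeinterpreter is therefore moved to its own unconditional install line. A hedged equivalent of that logic in plain Python (illustrative only, not part of the notebook):

    import os, subprocess, sys

    # /content exists only on Colab; mirror the shell test [ -e /content ].
    if os.path.exists('/content'):
        subprocess.run([sys.executable, '-m', 'pip', 'install', '-Uqq',
                        'fastbook', 'kaggle', 'waterfallcharts', 'dtreeviz'], check=True)

    # treeinterpreter is needed in every environment, so install it unconditionally.
    subprocess.run([sys.executable, '-m', 'pip', 'install', 'treeinterpreter'], check=True)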
@@ -7664,7 +7665,7 @@
 "outputs": [],
 "source": [
 "#hide\n",
-"# pip install —pre -f https://sklearn-nightly.scdn8.secure.raxcdn.com scikit-learn —U"
+"# pip install --pre -f https://sklearn-nightly.scdn8.secure.raxcdn.com scikit-learn --upgrade"
 ]
 },
 {
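Note on this hunk: the commented-out command used typographic em dashes in place of the ASCII double-hyphen option prefix, which pip cannot parse; the fix restores --pre and spells the short -U flag out as --upgrade.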
@@ -8751,6 +8752,8 @@
 "\n",
 "from treeinterpreter import treeinterpreter\n",
 "from waterfall_chart import plot as waterfall"
+
+
 ]
 },
 {
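Note on this hunk: these are the imports used by the tree-interpretation section of the chapter. A minimal usage sketch, assuming a fitted random forest m and a validation frame valid_xs exist from earlier cells (those names are illustrative here):

    from treeinterpreter import treeinterpreter
    from waterfall_chart import plot as waterfall

    # Decompose predictions for a few rows into bias + per-feature contributions.
    row = valid_xs.iloc[:5]
    prediction, bias, contributions = treeinterpreter.predict(m, row.values)

    # Waterfall plot of how each feature pushes the first prediction away from the mean.
    waterfall(valid_xs.columns, contributions[0], threshold=0.08,
              rotation_value=45, formatting='{:,.3f}')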
|
@@ -153,9 +153,11 @@
 }
 ],
 "source": [
-"tok = Tokenizer.from_folder(path)\n",
+"spacy = WordTokenizer()\n",
+"tok = Tokenizer(spacy)\n",
 "tok.setup(txts)\n",
 "toks = txts.map(tok)\n",
+
 "toks[0]"
 ]
 },
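Note on this hunk: instead of building the tokenizer from the folder, the cell now constructs the default word tokenizer explicitly (WordTokenizer, a thin wrapper over spaCy) and hands it to Tokenizer, which layers fastai's text rules and special tokens on top. A sketch of the cell after the change, assuming txts (a collection of review texts) was built in earlier notebook cells:

    from fastai.text.all import WordTokenizer, Tokenizer

    spacy = WordTokenizer()   # default word-level tokenizer (spaCy under the hood)
    tok = Tokenizer(spacy)    # adds fastai's special tokens and rules (xxbos, xxmaj, ...)
    tok.setup(txts)           # let the tokenizer see the texts it will process
    toks = txts.map(tok)      # tokenize every text in the collection
    toks[0]                   # inspect the first tokenized review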
|