nglk
2 years ago
2 changed files with 234 additions and 0 deletions
@@ -0,0 +1,134 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "877811c4",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Defaulting to user installation because normal site-packages is not writeable\n",
"Collecting tensorflow==1.15.0\n",
" Downloading tensorflow-1.15.0-cp36-cp36m-manylinux2010_x86_64.whl (412.3 MB)\n",
"\u001b[K |████████████████████████████████| 412.3 MB 25 kB/s eta 0:00:01\n",
"\u001b[33mWARNING: Retrying (Retry(total=4, connect=None, read=None, redirect=None, status=None)) after connection broken by 'ReadTimeoutError(\"HTTPSConnectionPool(host='pypi.org', port=443): Read timed out. (read timeout=15)\",)': /simple/keras/\u001b[0m\n",
"\u001b[?25hCollecting keras==2.2.5\n",
" Downloading Keras-2.2.5-py2.py3-none-any.whl (336 kB)\n",
"\u001b[K |████████████████████████████████| 336 kB 124 kB/s eta 0:00:01\n",
"\u001b[?25hRequirement already satisfied: wheel>=0.26 in /usr/lib/python3/dist-packages (from tensorflow==1.15.0) (0.30.0)\n",
"Collecting absl-py>=0.7.0\n",
" Downloading absl_py-1.3.0-py3-none-any.whl (124 kB)\n",
"\u001b[K |████████████████████████████████| 124 kB 109 kB/s eta 0:00:01\n",
"\u001b[?25hRequirement already satisfied: numpy<2.0,>=1.16.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==1.15.0) (1.16.2)\n",
"Collecting opt-einsum>=2.3.2\n",
" Downloading opt_einsum-3.3.0-py3-none-any.whl (65 kB)\n",
"\u001b[K |████████████████████████████████| 65 kB 119 kB/s eta 0:00:01\n",
"\u001b[?25hProcessing /home/angeliki/.cache/pip/wheels/7c/06/54/bc84598ba1daf8f970247f550b175aaaee85f68b4b0c5ab2c6/termcolor-1.1.0-cp36-none-any.whl\n",
"Collecting gast==0.2.2\n",
" Downloading gast-0.2.2.tar.gz (10 kB)\n",
"Collecting astor>=0.6.0\n",
" Downloading astor-0.8.1-py2.py3-none-any.whl (27 kB)\n",
"Collecting keras-preprocessing>=1.0.5\n",
" Downloading Keras_Preprocessing-1.1.2-py2.py3-none-any.whl (42 kB)\n",
"\u001b[K |████████████████████████████████| 42 kB 123 kB/s eta 0:00:01\n",
"\u001b[?25hRequirement already satisfied: six>=1.10.0 in /usr/lib/python3/dist-packages (from tensorflow==1.15.0) (1.11.0)\n",
"Collecting google-pasta>=0.1.6\n",
" Downloading google_pasta-0.2.0-py3-none-any.whl (57 kB)\n",
"\u001b[K |████████████████████████████████| 57 kB 119 kB/s eta 0:00:01\n",
"\u001b[?25hCollecting tensorboard<1.16.0,>=1.15.0\n",
" Downloading tensorboard-1.15.0-py3-none-any.whl (3.8 MB)\n",
"\u001b[K |████████████████████████████████| 3.8 MB 123 kB/s eta 0:00:01\n",
"\u001b[?25hCollecting tensorflow-estimator==1.15.1\n",
" Downloading tensorflow_estimator-1.15.1-py2.py3-none-any.whl (503 kB)\n",
"\u001b[K |████████████████████████████████| 503 kB 129 kB/s eta 0:00:01\n",
"\u001b[?25hCollecting grpcio>=1.8.6\n",
" Downloading grpcio-1.48.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (4.6 MB)\n",
"\u001b[K |████████████████████████████████| 4.6 MB 104 kB/s eta 0:00:01\n",
"\u001b[?25hCollecting protobuf>=3.6.1\n",
" Downloading protobuf-3.19.6-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.1 MB)\n",
"\u001b[K |████████████████████████████████| 1.1 MB 106 kB/s eta 0:00:01\n",
"\u001b[?25hCollecting keras-applications>=1.0.8\n",
" Downloading Keras_Applications-1.0.8-py3-none-any.whl (50 kB)\n",
"\u001b[K |████████████████████████████████| 50 kB 104 kB/s eta 0:00:01\n",
"\u001b[?25hCollecting wrapt>=1.11.1\n",
" Downloading wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (74 kB)\n",
"\u001b[K |████████████████████████████████| 74 kB 123 kB/s eta 0:00:01\n",
"\u001b[?25hCollecting h5py\n",
" Downloading h5py-3.1.0-cp36-cp36m-manylinux1_x86_64.whl (4.0 MB)\n",
"\u001b[K |████████████████████████████████| 4.0 MB 122 kB/s eta 0:00:01\n",
"\u001b[?25hRequirement already satisfied: scipy>=0.14 in /usr/local/lib/python3.6/dist-packages (from keras==2.2.5) (1.2.1)\n",
"Requirement already satisfied: pyyaml in /usr/lib/python3/dist-packages (from keras==2.2.5) (3.12)\n",
"Collecting setuptools>=41.0.0\n",
" Using cached setuptools-59.6.0-py3-none-any.whl (952 kB)\n",
"Requirement already satisfied: markdown>=2.6.8 in /home/angeliki/.local/lib/python3.6/site-packages (from tensorboard<1.16.0,>=1.15.0->tensorflow==1.15.0) (3.3.7)\n",
"Requirement already satisfied: werkzeug>=0.11.15 in /home/angeliki/.local/lib/python3.6/site-packages (from tensorboard<1.16.0,>=1.15.0->tensorflow==1.15.0) (1.0.1)\n",
"Collecting cached-property; python_version < \"3.8\"\n",
" Downloading cached_property-1.5.2-py2.py3-none-any.whl (7.6 kB)\n",
"Requirement already satisfied: importlib-metadata>=4.4; python_version < \"3.10\" in /home/angeliki/.local/lib/python3.6/site-packages (from markdown>=2.6.8->tensorboard<1.16.0,>=1.15.0->tensorflow==1.15.0) (4.8.3)\n",
"Requirement already satisfied: typing-extensions>=3.6.4; python_version < \"3.8\" in /home/angeliki/.local/lib/python3.6/site-packages (from importlib-metadata>=4.4; python_version < \"3.10\"->markdown>=2.6.8->tensorboard<1.16.0,>=1.15.0->tensorflow==1.15.0) (4.1.1)\n",
"Requirement already satisfied: zipp>=0.5 in /home/angeliki/.local/lib/python3.6/site-packages (from importlib-metadata>=4.4; python_version < \"3.10\"->markdown>=2.6.8->tensorboard<1.16.0,>=1.15.0->tensorflow==1.15.0) (3.5.0)\n",
"Building wheels for collected packages: gast\n",
" Building wheel for gast (setup.py) ... \u001b[?25ldone\n",
"\u001b[?25h Created wheel for gast: filename=gast-0.2.2-py3-none-any.whl size=7635 sha256=0fbcf07e73f4b08cdded16a5d95b5a9cc41515bd16f54b33e6e3e40311867af4\n",
" Stored in directory: /home/angeliki/.cache/pip/wheels/19/a7/b9/0740c7a3a7d1d348f04823339274b90de25fbcd217b2ee1fbe\n",
"Successfully built gast\n",
"\u001b[31mERROR: launchpadlib 1.10.6 requires testresources, which is not installed.\u001b[0m\n",
"Installing collected packages: absl-py, opt-einsum, termcolor, gast, astor, keras-preprocessing, google-pasta, grpcio, setuptools, protobuf, tensorboard, tensorflow-estimator, cached-property, h5py, keras-applications, wrapt, tensorflow, keras\n",
" Attempting uninstall: setuptools\n",
" Found existing installation: setuptools 40.8.0\n",
" Uninstalling setuptools-40.8.0:\n",
" Successfully uninstalled setuptools-40.8.0\n",
"\u001b[31mERROR: Could not install packages due to an EnvironmentError: [Errno 28] No space left on device\n",
"\u001b[0m\n",
"\u001b[33mWARNING: You are using pip version 20.0.2; however, version 21.3.1 is available.\n",
"You should consider upgrading via the '/usr/bin/python3 -m pip install --upgrade pip' command.\u001b[0m\n"
]
}
],
"source": [
"import sys\n",
"!{sys.executable} -m pip install tensorflow==1.15.0 keras==2.2.5\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "31e0fc42",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"id": "618fb0f8",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.9"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
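The cell in this first notebook installs the pinned versions tensorflow==1.15.0 and keras==2.2.5 through the notebook's own interpreter, but the captured log ends with "ERROR: Could not install packages due to an EnvironmentError: [Errno 28] No space left on device", so the install never completed. A minimal sketch of a follow-up check cell, not part of the commit, that could be run once disk space is freed and the install cell is re-executed (the expected version strings are simply the pins requested above):

# Hypothetical verification cell (not in the committed notebooks).
# If these imports raise ImportError, the pip cell above still needs to be
# re-run after freeing disk space.
import tensorflow as tf
import keras

print("tensorflow:", tf.__version__)  # expected 1.15.0, per the pin above
print("keras:", keras.__version__)    # expected 2.2.5, per the pin above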
@@ -0,0 +1,100 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"id": "9e50b7b2",
"metadata": {},
"outputs": [],
"source": [
"import random\n",
"import time\n",
"import json\n",
"import wget\n",
"\n",
"url = \"https://pad.vvvvvvaria.org/any_one_day_this_dictionary_has_died/export/txt\"\n",
"wget.download(url, 'any_one_day_this_dictionary_has_died.json')\n",
" \n",
"with open('any_one_day_this_dictionary_has_died.json', 'r') as f:\n",
" wordmord = json.loads(f.read().replace(\"'\", '\"'))\n",
"\n",
"#print(wordmord['death']['para-etymology'][0])\n",
"\n",
"sentence = input('give me your words / δώσε μου κείμενο: ')\n",
"\n",
"def makedemonic():\n",
"\tnew_sentence = sentence\n",
"\tfor word in wordmord:\n",
"\t\tif word in new_sentence:\n",
"\t\t\tnew_sentence = new_sentence.replace(word, random.choice(wordmord[word]['paramyms']))\n",
"\tprint(new_sentence)\n",
"\n",
"def makepara():\n",
"\tnew_sentence = sentence\n",
"\tfor word in wordmord:\n",
"\t\tif word in new_sentence:\n",
"\t\t\tnew_sentence = new_sentence.replace(word, random.choice(wordmord[word]['parameanings']))\n",
"\tprint(new_sentence)\n",
"\n",
"def makepira():\n",
"\tnew_sentence = sentence\n",
"\tfor word in wordmord:\n",
"\t\tif word in new_sentence:\n",
"\t\t\tnew_sentence = new_sentence.replace(word, random.choice(wordmord[word]['paradoxes']))\n",
"\tprint(new_sentence)\n",
"\n",
"\n",
"type = input('choose type of transformation / τύπος μετάλλαξης: ')\n",
"\n",
"if type == 'demonic':\n",
"\tmakedemonic()\n",
"elif type == 'para':\n",
"\tmakepara()\n",
"elif type == 'pira':\n",
"\tmakepira()\n",
"else:\n",
"\tmakedemonic()\n",
"\ttime.sleep(1)\n",
"\tmakepara()\n",
"\ttime.sleep(1)\n",
"\tmakepira()\n",
"\n"
]
},
{
"cell_type": "markdown",
"id": "2b7227d4",
"metadata": {},
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"id": "add48660",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.9"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
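In this second notebook, makedemonic(), makepara() and makepira() differ only in which list ('paramyms', 'parameanings' or 'paradoxes') they draw a replacement from for each matched word. A minimal sketch of a single parameterised helper, under the same assumed structure of the downloaded wordmord dictionary; this consolidation is not part of the commit, only an illustration of the shared pattern:

import random

# Assumed structure, as used in the notebook: wordmord maps each word to a
# dict whose 'paramyms', 'parameanings' and 'paradoxes' entries are lists
# of candidate replacements.
def transform(sentence, wordmord, key):
    new_sentence = sentence
    for word in wordmord:
        if word in new_sentence:
            new_sentence = new_sentence.replace(
                word, random.choice(wordmord[word][key]))
    return new_sentence

# e.g. transform(sentence, wordmord, 'paradoxes') mirrors makepira() above.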