
small changes

master
Cristina Cochior, 5 years ago
commit 8f40345367
17 changed files (changed lines or BIN in parentheses):
  1. .DS_Store (BIN)
  2. __pycache__/contextualise.cpython-37.pyc (BIN)
  3. allhtml.txt (45405)
  4. allhtml_proto.txt (42870)
  5. contextualise.py (2)
  6. generate_links.py (3)
  7. mostcommon.txt (2)
  8. putallhtmltexttogether.py (2)
  9. static/.DS_Store (BIN)
  10. static/css/main.css (3)
  11. templates/about.html (12)
  12. templates/home.html (6)
  13. templates/layout.html (7)
  14. textedit.py (61)
  15. wordlist.json (2)
  16. wordlist.py (37)
  17. wordlist_proto.json (1)

.DS_Store (BIN)

Binary file not shown.

__pycache__/contextualise.cpython-37.pyc (BIN)

Binary file not shown.

allhtml.txt (45405)

File diff suppressed because one or more lines are too long

allhtml_proto.txt (42870)

File diff suppressed because one or more lines are too long

contextualise.py (2)

@@ -138,7 +138,7 @@ def description():
session["id"].append(itemid)
for file in datafromjson:
if file.lower().endswith(('.html')):
with open("static/"+file,"r", encoding='utf-8') as f:
with open("static/"+file,"r", encoding='ISO-8859-1') as f:
textfile = f.read()
textfile = Markup(textfile)
textfiles.append(textfile)
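Note: the switch from 'utf-8' to 'ISO-8859-1' recurs in several files in this commit, presumably so that reading the collected HTML never fails on bytes that are not valid UTF-8. A minimal standalone sketch of the difference (the sample byte is illustrative, not taken from the repository):

# ISO-8859-1 maps every byte value to a character, so decoding cannot fail;
# strict UTF-8 decoding raises an error on malformed byte sequences.
data = bytes([0xE9])              # lone 0xE9 byte: "é" in Latin-1, invalid as UTF-8
print(data.decode('ISO-8859-1'))  # prints "é"
try:
    data.decode('utf-8')
except UnicodeDecodeError as err:
    print('utf-8 decode fails:', err)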

generate_links.py (3)

@@ -1,4 +1,3 @@
- #!/usr/bin/env python
import sys, os
import json
import re
@@ -11,7 +10,7 @@ for path, subdirs, files in os.walk(path):
for name in files:
if name.endswith('html'):
file = os.path.join(path, name)
- with open(file, 'r+', encoding='utf-8') as f:
+ with open(file, 'r+', encoding='ISO-8859-1') as f:
textfile = f.read()
for word in wordlist_dict:
word = re.escape(word)
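Note: re.escape is applied before each keyword is interpolated into a regular expression, so metacharacters such as "." are matched literally. A standalone illustration; the sample word "2.0" appears in mostcommon.txt, while the word-boundary pattern around it is only an assumption about how generate_links.py uses the escaped keyword:

import re

word = "2.0"  # keyword containing a regex metacharacter
pattern = re.compile(r"\b" + re.escape(word) + r"\b")
print(bool(pattern.search("Score 2.0 release")))  # True: the literal "2.0" is found
print(bool(pattern.search("Score 270 release")))  # False; unescaped, "." would also match the "7" in "270"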

mostcommon.txt (2)

@@ -1 +1 @@
- [('graphic', 540), ('sound', 510), ('Rotterdam', 480), ('nl', 480), ('music', 450), ('notation', 420), ('project', 420), ('de', 390), ('new', 360), ('The', 360), ('DE', 360), ('PLAYER', 360), ('TGC', 330), ('art', 300), ('3', 300), ('van', 270), ('performance', 270), ('Gamma', 270), ('Circulaire', 270), ('event', 240), ('Tetra', 240), ("'", 240), ('score', 210), ('release', 210), ('Kris', 210), ('2017', 180), ('artists', 180), ('scores', 180), ('Antwerp', 180), ('2.0', 180), ('George', 180), ('I', 180), ('Remco', 150), ('Bladel', 150), ('For', 150), ('publishing', 150), ('Score', 150), ('us', 150), ('XPUB', 150), ('magazine', 150), ('Media', 150), ('2018', 150), ('Paradiso', 150), ('This', 150), ('research', 150), ('Vaast', 150), ('Colson', 150), ('Art', 150), ('avant-garde', 150), ('Remörk', 150)]
+ [('The', 201), ('sound', 134), ('I', 128), ('music', 124), ('work', 74), ('project', 70), ('new', 68), ('record', 67), ('performance', 60), ('This', 59), ('It', 58), ('In', 56), ('graphic', 54), ('also', 52), ('notation', 48), ('art', 48), ("'", 47), ('time', 47), ('DE', 44), ('scores', 44), ('works', 43), ('one', 43), ('event', 42), ('made', 42), ('PLAYER', 41), ('live', 41), ("'s", 41), ('He', 41), ('score', 39), ('artist', 39), (';', 37), ('records', 37), ('Rotterdam', 35), ('Pushing', 35), ('first', 35), ('A', 34), ('For', 33), ('like', 33), ('used', 33), ('artists', 32), ('part', 32), ('would', 32), ('idea', 30), ('use', 30), ('MIDI', 30), ('Scores', 29), ('piece', 29), ('really', 29), ('&', 28), ('analog', 28)]

putallhtmltexttogether.py (2)

@@ -12,7 +12,7 @@ for path, subdirs, files in os.walk(path):
if name.endswith('html'):
file = os.path.join(path, name)
total = open("allhtml.txt", "a")
- with open(file) as f:
+ with open(file, 'r+', encoding='ISO-8859-1') as f:
content = f.read()
total.write(content)
total.close()
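Note: a standalone sketch of the concatenation step with the new encoding. The "static/files/" root is taken from the commented-out prototype in textedit.py and is an assumption here, as is the use of a context manager for allhtml.txt instead of the script's explicit open()/close():

import os

path = "static/files/"
# Append the text of every HTML file under path to allhtml.txt; opening both
# files with "with" guarantees they are closed even if a read fails.
with open("allhtml.txt", "a", encoding="ISO-8859-1") as total:
    for root, subdirs, files in os.walk(path):
        for name in files:
            if name.endswith("html"):
                with open(os.path.join(root, name), "r", encoding="ISO-8859-1") as f:
                    total.write(f.read())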

static/.DS_Store (BIN)

Binary file not shown.

static/css/main.css (3)

@@ -18,6 +18,9 @@ a{
text-decoration: none;
}
+ a:visited {
+ color:purple;
+ }
ul{
list-style: none;

templates/about.html (12)

@@ -5,18 +5,12 @@
<p>Pushing scores is a two years long project initiated by De Player(*) on music notation and.....
The archive is composed of....
The concept for this archive is....
- This archive was conceived and developped by.... (Cricri, Juju)
+ This archive was conceived and developped by....
Using Flask, Jinja, Python, suboptimal brain power.
- Performers for the launch... (Nick, Christine, surprise)
+ Performers for the launch...
<br />
* De Player is a Rotterdam based polymorphic production platform for performance art, experimental music and visual arts. <br />
** Varia is
** Varia is
</p>
</div>

templates/home.html (6)

@@ -1,7 +1,7 @@
{% extends "layout.html" %}
{% block content %}
<div class="home">
- <h1>Hi!</h1>
- <p>Shouldn't this be the page with all the links? Then we may remove description as a clickable link no?</p>
+ <h1>Start</h1>
+ <p>This will be the page with all the keywords.</p>
</div>
{% endblock %}
{% endblock %}

templates/layout.html (7)

@@ -13,11 +13,10 @@
<h1 class="logo">P.u.s.h.i.n.g.S.c.o.r.e.s</h1>
<strong><nav>
<ul class="menu">
<li><a href="{{ url_for('home') }}">Maybe all links for home page</a></li>
<li><a href="{{ url_for('about') }}">About</a></li>
<li><a href="{{ url_for('description') }}">Maybe we cut out this link</a></li>
<li><a href="{{ url_for('listofwords') }}">Dunno if useful?</a></li>
<li><a href="{{ url_for('get_file') }}">:-) Pushiiing</a></li>
<li><a href="{{ url_for('home') }}">All links for home page</a></li>
<li><a href="{{ url_for('listofwords') }}">Details of navigation</a></li>
<li><a href="{{ url_for('get_file') }}">Generate the score</a></li>
</ul>
</nav></strong>
</div>

textedit.py (61)

@@ -1,5 +1,3 @@
- #!/usr/bin/env python
- # -*- coding: utf-8 -*-
import sys, os
from nltk import sent_tokenize, word_tokenize
from nltk import everygrams
@@ -20,21 +18,13 @@ for path, subdirs, files in os.walk(path):
if name.endswith('html'):
file = os.path.join(path, name)
total = open("allhtml.txt", "a")
- with open(file) as f:
+ with open(file, 'r+', encoding='ISO-8859-1') as f:
content = f.read()
total.write(content)
total.close()
keyword_list = []
- # with open('allhtml.txt') as f:
- # content = f.read()
- # tokens = word_tokenize(content)
- # tokens = [token for token in tokens if token not in stopws]
- # freq_file=FreqDist(tokens)
- # print(tokens)
- # keyword_list.append(freq_file.most_common(50))
- # print(keyword_list[0])
with open('allhtml.txt') as f:
content = f.read()
@@ -57,7 +47,7 @@ sentences_w_word = {}
def analysis(the_word, file_name):
id = file_name[13:15]
- with open(file_name) as f:
+ with open(file_name, 'r+', encoding='ISO-8859-1') as f:
content = f.read()
sent_tokens = sent_tokenize(content)
new_sent_tokens = []
@@ -83,50 +73,3 @@ for path, subdirs, files in os.walk(path):
with open('wordlist.json', 'w', encoding="utf8") as outfile:
json.dump(sentences_w_word, outfile, ensure_ascii=False)
- # def analysis(file, id):
- # sent_tokens = sent_tokenize(file) # sentence tokenizing
- # for sent_token in sent_tokens:
- # tokens = word_tokenize(sent_token) # word tokenizing
- # print(tokens)
- # for token in tokens:
- # for first in keyword_list:
- # if token == first: # if token is in keyword_list
- # if token not in wordlist:
- # wordlist[token] = []
- # sent_dict = {}
- # sent_dict["id"]=id
- # sent_dict["sentence"] = sent_token.replace('\n', ' ')
- # wordlist[token].append(sent_dict)
- # elif token not in avoiding_repetition:
- # # print(wordlist[token])
- # sent_dict = {}
- # sent_dict["id"]=id
- # sent_dict["sentence"] = sent_token.replace('\n', ' ')
- # wordlist[token].append(sent_dict)
- # avoiding_repetition.append(token)
- # with open('static/files/17/17.blurb.html') as f:
- # content = f.read()
- # analysis(content, '17')
- # # reading each individual html file
- # path = "static/files/"
- # for path, subdirs, files in os.walk(path):
- # for name in files:
- # if name.endswith('html'):
- # file = os.path.join(path, name)
- # with open(file) as f:
- # content = f.read()
- # id=name[:2]
- # analysis(content, id)
- # json_wordlist = json.dumps(wordlist)
- # for item in wordlist:
- # for item2 in wordlist[item]:
- # print(item)
- # print(item2["sentence"])
- # print("\n")

wordlist.json (2)

File diff suppressed because one or more lines are too long

wordlist.py (37)

@@ -1,37 +0,0 @@
[
"item" : "17",
"sentences" : [
"George Brecht (August 27, 1926 – December 5, 2008), born George Ellis MacDiarmid, was an American conceptual artist and avant-garde composer, as well as a professional chemist who worked as a consultant for companies including Pfizer, Johnson & Johnson, and Mobil Oil.",
"He was a key member of, and influence on, Fluxus, the international group of avant-garde artists centred on George Maciunas, having been involved with the group from the first performances in Wiesbaden 1962 until Maciunas' death in 1978.",
"One of the originators of 'participatory' art, in which the artwork can only be experienced by the active involvement of the viewer, he is most famous for his Event Scores such as Drip Music 1962, and is widely seen as an important precursor to conceptual art."
"He described his own art as a way of 'ensuring that the details of everyday life, the random constellations of objects that surround us, stop going unnoticed.'",
"Steve Joy took me to meet George Brecht in his studio when I was in residence at St Michael's in Manhattan (c.1962). ",
"We became friends and GB mailed instruction cards to me.",
"I brought Steve Joy to St Vincent College when I returned to the monastery from Paris in 1963. GB agreed to provide instructions for an event at St Vincent."
"For his 'Vehicle Sundown Event', GB published a set of about 50 cards to be given to participants who participated in the event with their vehicles.",
"Each card held an instruction to be performed with a vehicle.",
"Drivers were instructed to assemble at sundown in a parking lot and randomly park their vehicles.",
"Then each driver, with a shuffled deck of instructions, would begin performing at the sound of a signal.",
"Participants performed about 50 events such as 'turn on lights', 'start engine', 'stop engine', 'open window'.",
"This work was performed at St Vincent College under the direction of Stephen Joy with Roman Verostko assisting. c. 1963 ( I can confirm that Fr Melvin Ruprecht participated.",
"I believe it was before I went to Washington as NCE editor, rv0"
],
"item" : "00",
"sentences" : []
]

wordlist_proto.json (1)

File diff suppressed because one or more lines are too long