Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 6 additions & 5 deletions campus/fhda/fhda_campus.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,20 +37,21 @@ def load_db(self, campus, year, quarter):
name = f'{year}{quarter_num}{self.CAMPUS_TO_NUM[campus]}'

try:
db = TinyDB(join(DB_DIR, f'{name}_database.json'), access_mode='r')
db = TinyDB(join(DB_DIR, f'merge_{name}_database.json'), access_mode='r')
except FileNotFoundError:
# raise FileNotFoundError
try:
db = TinyDB(join(DB_DIR, f'new_{name}_database.json'), access_mode='r')
db = TinyDB(join(DB_DIR, f'sched_{name}_database.json'), access_mode='r')
except FileNotFoundError:
# raise FileNotFoundError
try:
db = TinyDB(join(DB_DIR, f'sched_{name}_database.json'), access_mode='r')
db = TinyDB(join(DB_DIR, f'new_{name}_database.json'), access_mode='r')
except FileNotFoundError:
raise FileNotFoundError

return db

def load_multi_db(self, campus):
    """Open the campus-wide merged ("multi") database read-only."""
    db_path = join(DB_DIR, 'multi_database.json')
    return TinyDB(db_path, access_mode='r')

def list_dbs(self, campus):
with open(join(DB_DIR, 'metadata.json'), 'r') as file:
metadata = json.loads(file.read())
Expand Down
44 changes: 37 additions & 7 deletions campus/fhda/fhda_scrape.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,15 +5,19 @@
from collections import defaultdict

from titlecase import titlecase
from tinydb import TinyDB, where
from tinydb.storages import JSONStorage
from tinydb.middlewares import CachingMiddleware

from logger import log_err, log_warn
from logger import log, log_info, log_err, log_warn
from data.utils import list_dbs
from scraper.ssb_base import BaseHooks
from scraper.ssb_auth_schedule import AdvancedScraper
from scraper.ssb_public_schedule import ScheduleScraper
from scraper.postprocess import postprocess_dbs

from .fhda_login import login
from .fhda_settings import SSB_URL, DB_DIR, CACHE_DIR
from .fhda_settings import SSB_URL, DB_DIR, CACHE_DIR, NUM_TO_QUARTER, NUM_TO_CAMPUS
from .fhda_utils import clean_course_name_str

ENABLE_ADVANCED = True
Expand All @@ -39,6 +43,29 @@ def clean_dept_name(name: str):
return re.sub(r'^(.*\w)-[FHDA]{2}$', r'\1', name)


def get_term_info(term):
    """Decode a term code like '20212X' into (year, quarter, campus).

    The code packs a 4-digit year, a 1-digit quarter number, and a
    1-digit campus number.
    """
    year = int(term[0:4])
    quarter_num = int(term[4])
    quarter = NUM_TO_QUARTER[quarter_num]
    term_campus = NUM_TO_CAMPUS[int(term[5])]

    if quarter_num < 3:
        # Summer (1) and fall (2) terms are encoded under the *next* calendar
        # year (e.g. Fall 2020 is stored as 20212X), so decoding subtracts one
        # to recover the actual calendar year.
        year -= 1

    return year, quarter, term_campus


def load_db(term, tag, campus, readonly=False):
    """Open the TinyDB file for the given term and tag.

    Read-only access wraps JSONStorage in a caching middleware; otherwise
    the database is opened with default (writable) settings.
    """
    path = join(DB_DIR, f'{tag}_{term}_database.json')

    if not readonly:
        return TinyDB(path)
    return TinyDB(path, access_mode='r', storage=CachingMiddleware(JSONStorage))


class FHDAScraperHooks(BaseHooks):
@staticmethod
def transform_depts(depts):
Expand Down Expand Up @@ -105,10 +132,10 @@ def transform_class(class_data):
hooks=FHDAScraperHooks,
login=login,

max_terms=4,
max_terms=8,
# use_cache=False,
# start_term='202042',
trace=True,
# trace=True,
)
scraper.run()

Expand All @@ -127,10 +154,10 @@ def transform_class(class_data):
hooks=FHDAScraperHooks,
# login=login,

max_terms=4,
max_terms=8,
# use_cache=False,
# start_term='202042',
trace=True,
# start_term='202111',
# trace=True,
)
scraper.run()

Expand All @@ -151,3 +178,6 @@ def transform_class(class_data):

with open(join(DB_DIR, 'metadata.json'), 'w') as outfile:
json.dump({'tags': dict(tagdbs), 'terms': dict(termdbs)}, outfile)

db = TinyDB(join(DB_DIR, 'multi_database.json'))
postprocess_dbs(db, termdbs, get_term_info=get_term_info, load_db=load_db)
15 changes: 15 additions & 0 deletions campus/fhda/fhda_settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,21 @@
# Available Campuses - Foothill, De Anza, and test
CAMPUS_LIST = {'fh': '202121', 'da': '202122', 'test': 'test'}

# Quarter name -> numeric quarter code used in term identifiers (e.g. '20212X').
QUARTER_TO_NUM = {
    'summer': 1,
    'fall': 2,
    'winter': 3,
    'spring': 4
}
# Reverse lookup: numeric quarter code -> quarter name.
NUM_TO_QUARTER = {v: k for k, v in QUARTER_TO_NUM.items()}

# Campus short code -> numeric campus code (last digit of a term identifier).
CAMPUS_TO_NUM = {
    'fh': 1,
    'da': 2
}
# Reverse lookup: numeric campus code -> campus short code.
NUM_TO_CAMPUS = {v: k for k, v in CAMPUS_TO_NUM.items()}

'''
Course Type Flags - Foothill College

Expand Down
3 changes: 3 additions & 0 deletions campus/wvm/wvm_campus.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,9 @@ def load_db(self, campus, year, quarter):

return db

def load_multi_db(self, campus):
    """Open the merged ("multi") database for the given campus read-only."""
    prefix = self.CAMPUS_TO_PREFIX[campus]
    return TinyDB(join(DB_DIR, f'multi_{prefix}_database.json'), access_mode='r')

def list_dbs(self, campus):
with open(join(DB_DIR, 'metadata.json'), 'r') as file:
metadata = json.loads(file.read())
Expand Down
36 changes: 34 additions & 2 deletions campus/wvm/wvm_scrape.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,20 @@
import json
from os.path import join
from copy import deepcopy
from collections import defaultdict

from tinydb import TinyDB, where
from tinydb.storages import JSONStorage
from tinydb.middlewares import CachingMiddleware

from logger import log_err
from data.utils import list_dbs
from scraper.ssb_base import BaseHooks
from scraper.ssb_auth_schedule import AdvancedScraper
from scraper.ssb_public_schedule import ScheduleScraper
from scraper.postprocess import postprocess_dbs

from .wvm_settings import SSB_URL, DB_DIR, CACHE_DIR
from .wvm_settings import SSB_URL, DB_DIR, CACHE_DIR, NUM_TO_QUARTER, PREFIX_TO_CAMPUS


def clean_dept_name(name: str):
Expand All @@ -20,6 +26,23 @@ def clean_dept_name(name: str):
return re.sub(r'^(.*\w) ?- ?[WVMC]{2,3}$', r'\1', name)


def get_term_info(campus):
    """Build a decoder mapping a WVM term code to (year, quarter, campus).

    The returned callable parses the 4-digit year and 1-digit quarter from
    the term string; the campus is fixed by this factory's argument.
    """
    def decode(term):
        parsed_year = int(term[:4])
        parsed_quarter = NUM_TO_QUARTER[int(term[4])]
        return parsed_year, parsed_quarter, PREFIX_TO_CAMPUS[campus]

    return decode


def load_db(term, tag, campus, readonly=False):
    """Open the TinyDB file for a campus/term/tag combination.

    Read-only access wraps JSONStorage in a caching middleware; otherwise
    the database is opened with default (writable) settings.
    """
    path = join(DB_DIR, f'{tag}_{campus}_{term}_database.json')

    if not readonly:
        return TinyDB(path)
    return TinyDB(path, access_mode='r', storage=CachingMiddleware(JSONStorage))


class WVMScraperHooks(BaseHooks):
@staticmethod
def transform_depts(depts):
Expand Down Expand Up @@ -57,7 +80,7 @@ def transform_class(class_data):
# max_terms=4,
# use_cache=False,
# start_term='201231',
trace=True,
# trace=True,
)
scraper.run()

Expand All @@ -75,3 +98,12 @@ def transform_class(class_data):

with open(join(DB_DIR, 'metadata.json'), 'w') as outfile:
json.dump({'terms': termdbs}, outfile)

ddd = defaultdict(lambda: defaultdict(list))

for info in termdbs:
ddd[info['campus']][info['code']] = ['sched']

for campus, term_dbs in ddd.items():
db = TinyDB(join(DB_DIR, f'multi_{campus}_database.json'))
postprocess_dbs(db, term_dbs, campus=campus, get_term_info=get_term_info(campus), load_db=load_db)
13 changes: 13 additions & 0 deletions campus/wvm/wvm_settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,3 +5,16 @@
SSB_URL = 'https://ssb-prod.ec.wvm.edu/PROD/'
DB_DIR = join(ROOT_DB_DIR, 'wvm')
CACHE_DIR = join(ROOT_CACHE_DIR, 'wvm')

# Quarter name -> numeric quarter code used in WVM term identifiers.
QUARTER_TO_NUM = {
    'winter': 1,
    'spring': 3,
    'summer': 5,
    'fall': 7,
}
# Reverse lookup: numeric quarter code -> quarter name.
NUM_TO_QUARTER = {v: k for k, v in QUARTER_TO_NUM.items()}
# Campus short code -> database filename prefix.
CAMPUS_TO_PREFIX = {
    'wv': 'wvc',
    'mc': 'mc',
}
# Reverse lookup: filename prefix -> campus short code.
PREFIX_TO_CAMPUS = {v: k for k, v in CAMPUS_TO_PREFIX.items()}
10 changes: 10 additions & 0 deletions data/access.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,16 @@ def load(self, campus, year, quarter):

return ALL_CAMPUS[campus].load_db(campus, year, quarter)

def load_multi_db(self, campus):
    """Validate the campus, then open its merged ("multi") database."""
    self.validate_campus(campus)

    handler = ALL_CAMPUS[campus]
    return handler.load_multi_db(campus)

def one_instructor(self, db, instructor):
    """Fetch the single instructor record whose pretty_id matches."""
    instructors = db.table('instructors')
    return instructors.get(where('pretty_id') == instructor)

def campus_info(self, campus):
self.validate_campus(campus)

Expand Down
6 changes: 5 additions & 1 deletion data/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,8 @@ class ClassDataSchema(Schema):
# Number of open seats
# seats = fields.Int(required=True, min=0)
seats = fields.Int(min=0)
# Number of filled / taken seats
seats_taken = fields.Int(min=0)
# Number of open waitlist seats
# wait_seats = fields.Int(required=True, min=0)
wait_seats = fields.Int(min=0)
Expand Down Expand Up @@ -98,7 +100,9 @@ class ClassTimeSchema(Schema):
# time = fields.Str(required=True)
start_time = fields.Str(required=True)
end_time = fields.Str(required=True)
instructor = fields.Str(required=True)
# instructor = fields.Str(required=True)
# instructor = fields.List(fields.Str(), required=True)
instructor = fields.List(fields.Raw(), required=True)
location = fields.Str(required=True)
room = fields.Str()
campus = fields.Str()
Expand Down
2 changes: 2 additions & 0 deletions frontend/src/App.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ import { Router, route } from 'preact-router'
import CollegePage from './pages/CollegePage'
import DeptPage from './pages/DeptPage'
import CoursePage from './pages/CoursePage'
import InstructorPage from './pages/InstructorPage'
import { PageNotFound, CampusNotFound, ErrorPage } from './components/NotFound'
import { campus, PATH_PREFIX } from './data'
import { TermYear, CampusInfo, useRootApi } from './state'
Expand Down Expand Up @@ -80,6 +81,7 @@ export default function App() {
<WrapCampus path={`${PATH_PREFIX}/:college`} page={CollegePage}/>
<WrapCampus path={`${PATH_PREFIX}/:college/dept/:dept`} page={DeptPage}/>
<WrapCampus path={`${PATH_PREFIX}/:college/dept/:dept/course/:course`} page={CoursePage}/>
<WrapCampus path={`${PATH_PREFIX}/:college/instructor/:id`} page={InstructorPage}/>
<PageNotFound default />
</Router>
)
Expand Down
90 changes: 90 additions & 0 deletions frontend/src/components/ClassTable.jsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,90 @@
import { h, Fragment } from 'preact'
import Match from 'preact-router/match'

const Link = Match.Link

const replaceTBA = (text) => (
text === 'TBA'
? (
<span class="none">(none)</span>
)
: text
)

function ClassTimeCols({ time, campusId }) {
const timeString = time.start_time == 'TBA'
? replaceTBA('TBA')
: `${time.start_time || '?'} - ${time.end_time || '?'}`

// const instructors = (time.instructor || [])
// .map(
// ({ full_name, display_name, email }) => display_name || full_name
// )
// .join(', ')

const instructors = (time.instructor || [])
.map(({ full_name, display_name, email, pretty_id }, index, arr) => {
let name = display_name || full_name

if (index < arr.length - 1) {
name += ', '
}

return pretty_id
? <Link href={`/explore/${campusId}/instructor/${pretty_id}`} title={email}>{name}</Link>
: <span>{name}</span>
})
.flat()

return (
<>
<td>{time.type || '?'}</td>
<td>{instructors || '?'}</td>
<td>{replaceTBA(time.days || '?')}</td>
<td>{timeString || '?'}</td>
<td>{time.location || '?'}</td>
</>
)
}

export default function ClassesTable({ campusId, headers, classes, getClassColumns }) {
if (!classes) return <></>

const tableRowEls = []

for (const section of classes) {
const numRows = section.times.length || 1
const tableCols = getClassColumns(section)

tableRowEls.push(
<tr>
{tableCols.map((name) => <td rowspan={numRows}>{name}</td>)}
<ClassTimeCols campusId={campusId} time={section.times[0] || {}} />
</tr>
)

for (const time of section.times.slice(1)) {
if (!time) continue
tableRowEls.push(
<tr>
<ClassTimeCols campusId={campusId} time={time} />
</tr>
)
}
}

return (
<div class="table-container" style={{ fontSize: '14px' }}>
<table class="classes data">
<thead>
<tr>
{headers.map((name) => <th>{name}</th>)}
</tr>
</thead>
<tbody>
{tableRowEls}
</tbody>
</table>
</div>
)
}
Loading