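"""SLR(1) parser generator served as a Streamlit app.

Reads a context-free grammar, augments it, builds the LR(0) item sets,
the FOLLOW sets and the SLR(1) parsing table, and renders everything in
the browser. Run with: streamlit run app.py
"""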
import copy
import streamlit as st
stateMap = {}
diction = {}
start_symbol = ""
term_userdef = []
rules = []
statesDict = {}
stateCount = 0
separatedRulesList = []
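# grammarAugmentation: adds a fresh start production S' -> . S (priming S' until it
# is not already a user non-terminal) and splits every "A -> x | y" rule into
# per-alternative items with the dot at position 0. With the default grammar,
# "E -> E + T | T" becomes ['E', ['.', 'E', '+', 'T']] and ['E', ['.', 'T']].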
def grammarAugmentation(rules, nonterm_userdef, start_symbol):
    newRules = []
    newChar = start_symbol + "'"
    while newChar in nonterm_userdef:
        newChar += "'"
    newRules.append([newChar, ['.', start_symbol]])
    for rule in rules:
        if "->" not in rule:
            # skip blank or malformed lines from the text area
            continue
        k = rule.split("->")
        lhs = k[0].strip()
        rhs = k[1].strip()
        multirhs = rhs.split('|')
        for rhs1 in multirhs:
            rhs1 = rhs1.strip().split()
            rhs1.insert(0, '.')
            newRules.append([lhs, rhs1])
    return newRules
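# findClosure: LR(0) CLOSURE. If input_state is an int it seeds the set with every
# rule whose LHS is dotSymbol (used only for I0); otherwise it starts from the given
# item list. It then repeatedly adds the productions of any non-terminal that appears
# immediately after a dot, until the set stops growing.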
def findClosure(input_state, dotSymbol):
    global separatedRulesList
    closureSet = []
    if isinstance(input_state, int):
        for rule in separatedRulesList:
            if rule[0] == dotSymbol:
                closureSet.append(rule)
    else:
        closureSet = input_state
    prevLen = -1
    while prevLen != len(closureSet):
        prevLen = len(closureSet)
        tempClosureSet = []
        for rule in closureSet:
            indexOfDot = rule[1].index('.')
            if rule[1][-1] != '.':
                dotPointsHere = rule[1][indexOfDot + 1]
                for in_rule in separatedRulesList:
                    if dotPointsHere == in_rule[0] and in_rule not in tempClosureSet:
                        tempClosureSet.append(in_rule)
        for rule in tempClosureSet:
            if rule not in closureSet:
                closureSet.append(rule)
    return closureSet
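# compute_GOTO: collects every grammar symbol that appears right after a dot in the
# items of `state` and calls GOTO once for each such symbol.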
def compute_GOTO(state):
    global statesDict, stateCount
    generateStatesFor = []
    for rule in statesDict[state]:
        if rule[1][-1] != '.':
            indexOfDot = rule[1].index('.')
            dotPointsHere = rule[1][indexOfDot + 1]
            if dotPointsHere not in generateStatesFor:
                generateStatesFor.append(dotPointsHere)
    if len(generateStatesFor) != 0:
        for symbol in generateStatesFor:
            GOTO(state, symbol)
    return
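# GOTO: builds the successor of `state` on `charNextToDot` by moving the dot one
# position right in every matching item, closing the resulting kernel, and recording
# the transition in stateMap; an identical existing item set is reused instead of
# creating a new state number.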
def GOTO(state, charNextToDot):
    global statesDict, stateCount, stateMap
    newState = []
    for rule in statesDict[state]:
        indexOfDot = rule[1].index('.')
        if rule[1][-1] != '.':
            if rule[1][indexOfDot + 1] == charNextToDot:
                shiftedRule = copy.deepcopy(rule)
                shiftedRule[1][indexOfDot] = shiftedRule[1][indexOfDot + 1]
                shiftedRule[1][indexOfDot + 1] = '.'
                newState.append(shiftedRule)
    addClosureRules = []
    for rule in newState:
        indexDot = rule[1].index('.')
        if rule[1][-1] != '.':
            closureRes = findClosure(newState, rule[1][indexDot + 1])
            for closureRule in closureRes:
                if closureRule not in addClosureRules and closureRule not in newState:
                    addClosureRules.append(closureRule)
    for rule in addClosureRules:
        newState.append(rule)
    stateExists = -1
    for state_num in statesDict:
        if statesDict[state_num] == newState:
            stateExists = state_num
            break
    if stateExists == -1:
        stateCount += 1
        statesDict[stateCount] = newState
        stateMap[(state, charNextToDot)] = stateCount
    else:
        stateMap[(state, charNextToDot)] = stateExists
    return
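# generateStates: starting from I0, repeatedly applies compute_GOTO to every state
# not yet processed until no new states are produced, filling statesDict and stateMap.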
def generateStates(I0):
    global statesDict, stateCount
    statesDict[0] = I0
    stateCount = 0
    prev_len = -1
    called_GOTO_on = []
    while len(statesDict) != prev_len:
        prev_len = len(statesDict)
        keys = list(statesDict.keys())
        for key in keys:
            if key not in called_GOTO_on:
                called_GOTO_on.append(key)
                compute_GOTO(key)
    return
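# first: FIRST of a symbol sequence. A leading terminal (or the epsilon marker '#')
# is returned as-is; for a leading non-terminal the FIRST sets of its alternatives in
# `diction` are combined, and if an alternative can derive epsilon the FIRST of the
# remaining sequence is appended. E.g. with the default grammar, first(['(', 'E', ')'])
# is ['('].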
def first(rule):
    global term_userdef, diction
    if len(rule) != 0 and rule[0] in term_userdef:
        return [rule[0]]
    if len(rule) != 0 and rule[0] == '#':
        return ['#']
    if len(rule) != 0:
        fres = []
        rhs_rules = diction[rule[0]]
        for itr in rhs_rules:
            indivRes = first(itr)
            if '#' in indivRes:
                indivRes.remove('#')
                if len(rule) > 1:
                    indivRes += first(rule[1:])
                else:
                    indivRes.append('#')
            fres.extend(indivRes)
        return list(set(fres))
    return []
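# follow: FOLLOW(nt) computed directly from `diction`: '$' for the start symbol,
# FIRST of whatever follows nt inside a rule body, and FOLLOW of the defining
# non-terminal when nt ends the body or the tail can derive epsilon. E.g. with the
# default grammar, Follow(E) is { +, ), $ }.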
def follow(nt):
    global start_symbol, diction
    solset = set()
    if nt == start_symbol:
        solset.add('$')
    for curNT in diction:
        rhs = diction[curNT]
        for subrule in rhs:
            if nt in subrule:
                while nt in subrule:
                    index_nt = subrule.index(nt)
                    subrule = subrule[index_nt + 1:]
                    if subrule:
                        res = first(subrule)
                        if '#' in res:
                            res.remove('#')
                            solset.update(res)
                            solset.update(follow(curNT))
                        else:
                            solset.update(res)
                    else:
                        if nt != curNT:
                            solset.update(follow(curNT))
    return list(solset)
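# createParseTable: shift and goto entries come from stateMap, 'ACC' is placed under
# '$' for the completed augmented start rule, and every other completed item becomes
# a reduce entry (R<rule number>) under the terminals in FOLLOW of its LHS.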
def createParseTable(statesDict, stateMap, T, NT):
    global separatedRulesList
    parse_table = {i: {} for i in range(len(statesDict))}
    # Shift and goto entries come directly from the recorded state transitions
    for (state, symbol) in stateMap:
        if symbol in T:
            parse_table[state][symbol] = f'S{stateMap[(state, symbol)]}'
        elif symbol in NT:
            parse_table[state][symbol] = str(stateMap[(state, symbol)])
    # Accept and reduce entries come from items with the dot at the end
    for state in statesDict:
        for item in statesDict[state]:
            if item[1][-1] == '.':
                prod = f"{item[0]} -> {' '.join(item[1][:-1])}"
                if prod == f"{separatedRulesList[0][0]} -> {separatedRulesList[0][1][1]}":
                    parse_table[state]['$'] = 'ACC'
                else:
                    # Match the completed item (dot at the end) against the original
                    # rule (dot at the front) to recover its rule number
                    rule_index = next(
                        (i for i, rule in enumerate(separatedRulesList)
                         if rule[0] == item[0] and rule[1][1:] == item[1][:-1]),
                        None
                    )
                    if rule_index is None:
                        print(f"Warning: Rule for item {item} not found in separatedRulesList.")
                        continue
                    # SLR(1): place the reduce action only under FOLLOW(lhs)
                    for symbol in follow(item[0]):
                        if symbol in T + ['$'] and symbol not in parse_table[state]:
                            parse_table[state][symbol] = f'R{rule_index}'
    return parse_table
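# main: Streamlit front end. Reads the grammar, non-terminals, terminals and start
# symbol, then (on button press) shows the augmented grammar, I0, all item sets, the
# SLR(1) parsing table and the FOLLOW sets.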
def main():
    global separatedRulesList, diction, term_userdef, start_symbol
    st.title("SLR(1) Parser Generator")
    st.header("Grammar Input")
    rules = st.text_area("Enter the grammar rules (one per line):",
                         "E -> E + T | T\nT -> T * F | F\nF -> ( E ) | id")
    rules = rules.split('\n')
    nonterm_userdef = st.text_input("Enter the non-terminal symbols (separated by space):", "E T F")
    nonterm_userdef = nonterm_userdef.split()
    term_userdef = st.text_input("Enter the terminal symbols (separated by space):", "id + * ( )")
    term_userdef = term_userdef.split()
    start_symbol = st.text_input("Enter the start symbol:", "E")
    if st.button("Generate SLR(1) Parser"):
        st.subheader("Original Grammar Input:")
        for rule in rules:
            st.write(rule)
        st.subheader("Grammar after Augmentation:")
        separatedRulesList = grammarAugmentation(rules, nonterm_userdef, start_symbol)
        for rule in separatedRulesList:
            st.write(f"{rule[0]} -> {' '.join(rule[1])}")
        # Build the LHS -> list-of-RHS dictionary that first()/follow() read from
        # (the dot at index 0 of every augmented rule is stripped)
        diction = {}
        for rule in separatedRulesList:
            diction.setdefault(rule[0], []).append(rule[1][1:])
        start_symbol = separatedRulesList[0][0]
st.subheader("Calculated Closure: I0")
I0 = findClosure(0, start_symbol)
st.write("I0:")
for rule in I0:
st.write(f"{rule[0]} -> {' '.join(rule[1])}")
generateStates(I0)
st.subheader("Generated States:")
for state, rules in statesDict.items():
st.write(f"I{state}:")
for rule in rules:
st.write(f" {rule[0]} -> {' '.join(rule[1])}")
st.write("")
st.subheader("SLR(1) Parsing Table")
table = createParseTable(statesDict, stateMap, term_userdef, nonterm_userdef)
cols = ['State'] + term_userdef + ['$'] + nonterm_userdef
table_data = []
for state in range(len(statesDict)):
row = [f'I{state}']
for symbol in term_userdef + ['$'] + nonterm_userdef:
row.append(table[state].get(symbol, ''))
table_data.append(row)
st.table([cols] + table_data)
compact_table = "State`" + '`'.join(term_userdef + ['$'] + nonterm_userdef) + '`'
for state in range(len(statesDict)):
compact_table += f"I{state}"
for symbol in term_userdef + ['$'] + nonterm_userdef:
compact_table += table[state].get(symbol, '')
st.subheader("Follow Sets")
follow_sets = {}
for nt in nonterm_userdef:
follow_set = follow(nt)
follow_sets[nt] = follow_set
st.write(f"Follow({nt}): {{ {' , '.join(follow_set)} }}")
compact_follow = "Non-Terminal`Follow Set\n"
for nt, follow_set in follow_sets.items():
compact_follow += f"{nt}`{{ {' , '.join(follow_set)} }}\n"
st.text_area("Compact Follow Sets Representation:", value=compact_follow, height=300)
if __name__ == "__main__":
    main()