Unverified commit 0d6a882f authored by dhanajitb, committed by GitHub

Cleaned some redundant lines

```python
while not args.unconditional:
    if not args.unconditional:
```
These lines have been updated: the flag is now checked once with `if not args.unconditional:`, and the prompt loop runs as `while True:`.
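To make the redundancy concrete, here is a minimal sketch of the control-flow change, with `get_prompt` and `generate_samples` as hypothetical stand-ins for the actual prompt-reading and sampling code in `run_gpt2.py`:

```python
# Minimal sketch of the cleanup; get_prompt and generate_samples are
# hypothetical stand-ins for the real prompt-reading and sampling code.

def get_prompt():
    return input("Model prompt >>> ")

def generate_samples(prompt):
    print("(sampling for: %r)" % prompt)

def interactive_loop_before(unconditional):
    # Old structure: once the while body runs, `not unconditional` is already
    # known to be true, so the inner `if` is always taken and therefore redundant.
    while not unconditional:
        if not unconditional:
            generate_samples(get_prompt())

def interactive_loop_after(unconditional):
    # New structure: check the flag once, then loop over prompts indefinitely.
    if not unconditional:
        while True:
            generate_samples(get_prompt())
```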
parent 4d3721f9
@@ -83,29 +83,29 @@ def run_model():
     elif args.length > model.config.n_ctx:
         raise ValueError("Can't get samples longer than window size: %s" % model.config.n_ctx)
-    while not args.unconditional:
-        if not args.unconditional:
+    if not args.unconditional:
+        while True:
             raw_text = input("Model prompt >>> ")
             while not raw_text:
                 print('Prompt should not be empty!')
                 raw_text = input("Model prompt >>> ")
             context_tokens = enc.encode(raw_text)
             generated = 0
             for _ in range(args.nsamples // args.batch_size):
                 out = sample_sequence(
                     model=model, length=args.length,
-                    context=context_tokens if not args.unconditional else None,
-                    start_token=enc.encoder['<|endoftext|>'] if args.unconditional else None,
+                    context=context_tokens,
+                    start_token=None,
                     batch_size=args.batch_size,
                     temperature=args.temperature, top_k=args.top_k, device=device
                 )
                 out = out[:, len(context_tokens):].tolist()
                 for i in range(args.batch_size):
                     generated += 1
                     text = enc.decode(out[i])
                     print("=" * 40 + " SAMPLE " + str(generated) + " " + "=" * 40)
                     print(text)
             print("=" * 80)
     if args.unconditional:
         generated = 0
         for _ in range(args.nsamples // args.batch_size):
...
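The unconditional branch is truncated above at its `for` loop. A hedged sketch of what it presumably does follows, wrapped as a standalone function for illustration; the function name and parameter list are mine, and the body is inferred from the conditional expressions removed in this diff rather than copied from the file:

```python
# Sketch of the unconditional branch, inferred from the removed conditional
# expressions (context=None, start_token=enc.encoder['<|endoftext|>']).
# The function wrapper and parameter list are illustrative, not from the file.
def sample_unconditionally(model, enc, sample_sequence, args, device):
    generated = 0
    for _ in range(args.nsamples // args.batch_size):
        out = sample_sequence(
            model=model, length=args.length,
            context=None,
            start_token=enc.encoder['<|endoftext|>'],
            batch_size=args.batch_size,
            temperature=args.temperature, top_k=args.top_k, device=device
        )
        # Drop the leading <|endoftext|> start token before decoding
        # (assumption: the sampled sequence includes it at position 0).
        out = out[:, 1:].tolist()
        for i in range(args.batch_size):
            generated += 1
            text = enc.decode(out[i])
            print("=" * 40 + " SAMPLE " + str(generated) + " " + "=" * 40)
            print(text)
    print("=" * 80)
```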