author    Hadriel Kaplan <hadrielk@yahoo.com>    2014-03-22 12:58:12 -0400
committer Alexis La Goutte <alexis.lagoutte@gmail.com>    2014-03-22 17:42:16 +0000
commit    69af33f0f3b35821574762867805a3b5e09bd62d (patch)
tree      c00369fbff92d4ae87e41ada589a41081c90c675 /test/lua
parent    1f28f472c85f51cc7925f18bca285ccfe0d4e5f6 (diff)
Fix spelling mistakes in Lua test scripts
Change-Id: I8671eb3d3b46ec5c723a6545451ecbb33a10c807
Reviewed-on: https://code.wireshark.org/review/788
Reviewed-by: Hadriel Kaplan <hadrielk@yahoo.com>
Reviewed-by: Alexis La Goutte <alexis.lagoutte@gmail.com>
Diffstat (limited to 'test/lua')
-rw-r--r--    test/lua/acme_file.lua    26
-rw-r--r--    test/lua/dissector.lua    18
-rw-r--r--    test/lua/inspect.lua       6
-rw-r--r--    test/lua/pcap_file.lua     6
4 files changed, 28 insertions, 28 deletions
diff --git a/test/lua/acme_file.lua b/test/lua/acme_file.lua
index 1f87fc640b..f00f25f0cb 100644
--- a/test/lua/acme_file.lua
+++ b/test/lua/acme_file.lua
@@ -208,7 +208,7 @@ local fh = FileHandler.new("Oracle Acme Packet logs", "acme",
-- don't exist in the log file for example to create IP headers we have to create
-- fake identification field values, and to create timestamps we have to guess the
-- year (and in some cases month/day as well), and for TCP we have to create fake
--- conneciton info, such as sequence numbers. We can't simply have a global static
+-- connection info, such as sequence numbers. We can't simply have a global static
-- variable holding such things, because Wireshark reads the file sequentially at
-- first, but then calls seek_read for random packets again and we don't want to
-- re-create the fake info again because it will be wrong. So we need to create it
@@ -223,7 +223,7 @@ local fh = FileHandler.new("Oracle Acme Packet logs", "acme",
-- I said above that this state is "global", but really it can't be global to this
-- whole script file, because more than one file can be opened for reading at the
--- same time. For exampel if the user presses the reload button, the capture file
+-- same time. For example if the user presses the reload button, the capture file
-- will be opened for reading before the previous (same) one is closed. So we have
-- to store state per-file. The good news is Wireshark gives us a convenient way to
-- do that, using the CaptureInfo.private_table attribute/member. We can save a Lua
@@ -231,7 +231,7 @@ local fh = FileHandler.new("Oracle Acme Packet logs", "acme",
-- later during the other read/seek_read/cose function calls.
-- So to store this per-file state, we're going to use Lua class objects. They're
--- just Lua tables that have functions and metafunctions and can be treated like
+-- just Lua tables that have functions and meta-functions and can be treated like
-- objects in terms of syntax/behavior.
local State = {}
@@ -268,13 +268,13 @@ function State.new()
return new_class
end
--- the indeces for the State.packets{} variable sub-tables
+-- the indices for the State.packets{} variable sub-tables
local IP_IDENT = 1
local TTIME = 2
local LOCAL_SEQ = 3
local REMOTE_SEQ = 4
--- the indeces for the State.tcb{} sub-tables
+-- the indices for the State.tcb{} sub-tables
local TLOCAL_SEQ = 1
local TREMOTE_SEQ = 2
@@ -438,7 +438,7 @@ function State:get_timestamp(line, file_position, seeking)
return self.nstime, line_pos
end
--- get_tail_time() gets a fictitous timestamp starting from 19:00:00 on Dec 31, 1969, and incrementing based
+-- get_tail_time() gets a fictitious timestamp starting from 19:00:00 on Dec 31, 1969, and incrementing based
-- on the minutes/secs/millisecs seen (i.e., if the minute wrapped then hour increases by 1, etc.).
-- this is needed for tail'ed log files, since they don't show month/day/hour
function State:get_tail_time(line, file_position, seeking)
@@ -620,7 +620,7 @@ local TCP = 20
-- classes.
-- For performance reasons, packet data is read line-by-line into a table (called bufftbl),
--- which is concatendated at the end. This avoids Lua building interim strings and garbage
+-- which is concatenated at the end. This avoids Lua building interim strings and garbage
-- collecting them. But it makes the code uglier. The get_data()/get_hex_data()/get_ascii_data()
-- methods read into this table they get passed, while the read_data() functions handle managing
-- the table.
@@ -715,7 +715,7 @@ function Packet:get_ascii_data(file, line, bufftbl, index, only_newline)
until line:find(delim)
- -- get rid of last \r\n, if we found a dashed delimeter, as it's not part of packet
+ -- get rid of last \r\n, if we found a dashed delimiter, as it's not part of packet
if found_delim then
bufflen = bufflen - bufftbl[index-1]:len()
bufftbl[index-1] = nil
@@ -806,7 +806,7 @@ end
--
local DataPacket = {}
local DataPacket_mt = { __index = DataPacket }
-setmetatable( DataPacket, Packet_mt ) -- make Dataacket inherit from Packet
+setmetatable( DataPacket, Packet_mt ) -- make DataPacket inherit from Packet
function DataPacket.new(...)
local new_class = Packet.new(...) -- the new instance
@@ -1081,7 +1081,7 @@ function BinPacket:get_data(file, line, bufftbl, index)
local bufflen, line = self:get_hex_data(file, line, bufftbl, index)
- -- now eat rest of message until delimeter or end of file
+ -- now eat rest of message until delimiter or end of file
-- we'll put them in comments
line = self:get_comment_data(file, line, delim)
@@ -1090,7 +1090,7 @@ function BinPacket:get_data(file, line, bufftbl, index)
end
----------------------------------------
--- DnsPacket class, for DNS packets (which are binary but with commments at top)
+-- DnsPacket class, for DNS packets (which are binary but with comments at top)
--
local DnsPacket = {}
local DnsPacket_mt = { __index = DnsPacket }
@@ -1107,12 +1107,12 @@ function DnsPacket:get_data(file, line, bufftbl, index)
-- it's UDP regardless of what parse_header() thinks
self.ttype = UDP
- -- comments are at top instead of bottom of messsage
+ -- comments are at top instead of bottom of message
line = self:get_comment_data(file, line, binpacket_start_pattern)
local bufflen, line = self:get_hex_data(file, line, bufftbl, index)
- -- now eat rest of message until delimeter or end of file
+ -- now eat rest of message until delimiter or end of file
while line and not line:find(delim) do
line = file:read()
end
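For reference, the class-object technique the acme_file.lua comments describe is plain Lua: tables plus an __index metatable, with "inheritance" done by chaining metatables (the same shape as the Packet/DataPacket lines above). The sketch below is a minimal, illustrative reduction of that pattern, not the script's actual classes, and it also shows the table.concat buffering trick the performance comment refers to.

-- a minimal sketch of the table + __index "class" pattern described above
local Packet = {}
local Packet_mt = { __index = Packet }          -- method lookups fall back to Packet

function Packet.new(ttype)
    local new_class = { ttype = ttype }         -- the new instance
    setmetatable(new_class, Packet_mt)
    return new_class
end

function Packet:describe()
    return "packet of type " .. tostring(self.ttype)
end

-- "inheritance": instances look in DataPacket first, then fall through to Packet
local DataPacket = {}
local DataPacket_mt = { __index = DataPacket }
setmetatable(DataPacket, Packet_mt)             -- make DataPacket inherit from Packet

function DataPacket.new(...)
    local new_class = Packet.new(...)           -- build the base instance
    setmetatable(new_class, DataPacket_mt)      -- then re-point it at DataPacket
    return new_class
end

-- the bufftbl trick from the performance comment: collect pieces in a table
-- and build the final string once, instead of chaining interim ".." strings
local bufftbl = { "48656c", "6c6f" }
local data = table.concat(bufftbl)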
diff --git a/test/lua/dissector.lua b/test/lua/dissector.lua
index 632ae87f9b..3fb835f289 100644
--- a/test/lua/dissector.lua
+++ b/test/lua/dissector.lua
@@ -28,7 +28,7 @@
-- Once the script is loaded, it creates a new protocol named "MyDNS" (or "MYDNS" in some places). If you have
-- a capture file with DNS packets in it, simply select one in the Packet List pane, right-click on it, and
-- select "Decode As ...", and then in the dialog box that shows up scroll down the list of protocols to one
--- called "MYDNS", select that and click the "ok" or "apply" button. Voila`, your'e now decoding DNS packets
+-- called "MYDNS", select that and click the "ok" or "apply" button. Voila`, you're now decoding DNS packets
-- using the simplistic dissector in this script. Another way is to download the capture file made for
-- this script, and open that - since the DNS packets in it use UDP port 65333 (instead of the default 53),
-- and since the MyDNS protocol in this script has been set to automatically decode UDP port 65333, it will
@@ -123,7 +123,7 @@ local pf_query_class = ProtoField.uint16("mydns.query.class", "Class", ba
----------------------------------------
-- this actually registers the ProtoFields above, into our new Protocol
--- in a real script I wouldn't do it this way; I'd build a table of fields programaticaly
+-- in a real script I wouldn't do it this way; I'd build a table of fields programmatically
-- and then set dns.fields to it, so as to avoid forgetting a field
dns.fields = { pf_trasaction_id, pf_flags,
pf_num_questions, pf_num_answers, pf_num_authority_rr, pf_num_additional_rr,
@@ -200,7 +200,7 @@ function dns.dissector(tvbuf,pktinfo,root)
-- We want to check that the packet size is rational during dissection, so let's get the length of the
-- packet buffer (Tvb).
- -- Because DNS has no additonal payload data other than itself, and it rides on UDP without padding,
+ -- Because DNS has no additional payload data other than itself, and it rides on UDP without padding,
-- we can use tvb:len() or tvb:reported_len() here; but I prefer tvb:reported_length_remaining() as it's safer.
local pktlen = tvbuf:reported_length_remaining()
@@ -224,7 +224,7 @@ function dns.dissector(tvbuf,pktinfo,root)
tree:add(pf_trasaction_id, tvbuf:range(0,2))
-- We'd like to put the transaction id number in the GUI row for this packet, in its
- -- INFO column/cell. Firt we need the transaction id value, though. Since we just
+ -- INFO column/cell. First we need the transaction id value, though. Since we just
-- dissected it with the previous code line, we could now get it using a Field's
-- FieldInfo extractor, but instead we'll get it directly from the TvbRange just
-- to show how to do that. We'll use Field/FieldInfo extractors later on...
@@ -303,7 +303,7 @@ function dns.dissector(tvbuf,pktinfo,root)
-- now add the first query to the 'Queries' child tree we just created
-- we're going to change the string generated by this later, after we figure out the subsequent fields.
- -- the whole query field is the query name field length we just got, plus the 20byte type and 2-byte class
+ -- the whole query field is the query name field length we just got, plus the 20 byte type and 2-byte class
local q_tree = queries_tree:add(pf_query, tvbuf:range(pos, name_len + 4))
q_tree:add(pf_query_name, tvbuf:range(pos, name_len), name)
@@ -353,7 +353,7 @@ udp_encap_table:add(MYDNS_PROTO_UDP_PORT, dns)
-- dissect it if it's for us, and we need to return true if it's for us, or else false
-- figuring out if it's for us or not is not easy
-- we need to try as hard as possible, or else we'll think it's for us when it's
--- not and block other heuristic dissectors from getting their chanc
+-- not and block other heuristic dissectors from getting their chance
--
-- in practice, you'd never set a dissector like this to be heuristic, because there
-- just isn't enough information to safely detect if it's DNS or not
@@ -370,7 +370,7 @@ local function heur_dissect_dns(tvbuf,pktinfo,root)
local tvbr = tvbuf:range(0,DNS_HDR_LEN)
- -- the first 2 bytes are tansaction id, which can be anything so no point in checking those
+ -- the first 2 bytes are transaction id, which can be anything so no point in checking those
-- the next 2 bytes contain flags, a couple of which have some values we can check against
-- the opcode has to be 0, 1, 2, 4 or 5
@@ -403,13 +403,13 @@ local function heur_dissect_dns(tvbuf,pktinfo,root)
dprint2("heur_dissect_dns: everything looks good calling the real dissector")
- -- don't do this line in your script - I'm just doing it so our testsuite can
+ -- don't do this line in your script - I'm just doing it so our test-suite can
-- verify this script
root:add("Heuristic dissector used"):set_generated()
-- ok, looks like it's ours, so go dissect it
-- note: calling the dissector directly like this is new in 1.11.3
- -- also note that calling a Dissector objkect, as this does, means we don't
+ -- also note that calling a Dissector object, as this does, means we don't
-- get back the return value of the dissector function we created previously
-- so it might be better to just call the function directly instead of doing
-- this, but this script is used for testing and this tests the call() function
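The comment about building the field table programmatically (instead of hand-listing every ProtoField in dns.fields) could look roughly like the sketch below. The protocol and field names here are made up for illustration; only Proto, ProtoField, and base are the real Wireshark Lua API being assumed.

-- a rough sketch of registering ProtoFields built programmatically
local myproto = Proto("myexample", "Example Protocol")

local pf = {}                                   -- collect every ProtoField in one table
pf.trans_id = ProtoField.uint16("myexample.trans_id", "Transaction ID")
pf.flags    = ProtoField.uint16("myexample.flags", "Flags", base.HEX)

local fields = {}
for _, field in pairs(pf) do
    fields[#fields + 1] = field
end
myproto.fields = fields                         -- one assignment, so no field is forgotten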
diff --git a/test/lua/inspect.lua b/test/lua/inspect.lua
index 1a57ade8b6..6b4aff9cb4 100644
--- a/test/lua/inspect.lua
+++ b/test/lua/inspect.lua
@@ -53,7 +53,7 @@ local inspect ={
_DEPTH_MARKER = " ['<depth>'] = true " -- instead of printing '...' we print this
}
--- Apostrophizes the string if it has quotes, but not aphostrophes
+-- Apostrophizes the string if it has quotes, but not apostrophes
-- Otherwise, it returns a regular quoted string
local function smartQuote(str)
if str:match('"') and not str:match("'") then
@@ -516,7 +516,7 @@ end
-- Like inspect(), there's a 'filter' option, which works the same way:
-- it ignores its value completely in terms of matching, so their string values
-- can be different, but the keys still have to exist. Sub-tables of
--- such keys (ie, if the key's value is a table) are not checked/compared.
+-- such keys (i.e., if the key's value is a table) are not checked/compared.
-- In other words, it's identical to the filter option for inspect().
--
-- The option 'ignore' is similar to 'filter', except matching ones
@@ -617,7 +617,7 @@ function inspect.compare(firstTable, secondTable, options)
elseif tf == 'number' then
return f == s or options.nonumber
else
- -- asume they're the same functions/userdata/looped-table
+ -- assume they're the same functions/userdata/looped-table
-- type matching before would already cover it otherwise
return true
end
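The smartQuote() behavior described in the hunk above (single-quote a string that contains double quotes but no apostrophes, otherwise return a regular quoted string) can be pictured with a small stand-alone sketch; this is a simplified restatement, not inspect.lua's exact implementation.

-- simplified illustration of the smartQuote() idea
local function smartQuote(str)
    if str:match('"') and not str:match("'") then
        return "'" .. str .. "'"                -- apostrophize: keep the inner quotes readable
    end
    return string.format("%q", str)             -- regular quoted string
end

print(smartQuote('say "hi"'))                   --> 'say "hi"'
print(smartQuote("it's fine"))                  --> "it's fine"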
diff --git a/test/lua/pcap_file.lua b/test/lua/pcap_file.lua
index 87e8a57cee..9f42f9700b 100644
--- a/test/lua/pcap_file.lua
+++ b/test/lua/pcap_file.lua
@@ -437,10 +437,10 @@ parse_file_header = function(file)
end
-- this is: magic, version_major, version_minor, timezone, sigfigs, snaplen, encap type
- local fields = { Struct.unpack(FILE_HEADER_PATT, line) }
+ local fields = { Struct.unpack(file_settings.file_hdr_patt, line) }
-- sanity check; also note that Struct.unpack() returns the fields plus
- -- a number of where in the line it stopped reading (ie, the end in this case)
+ -- a number of where in the line it stopped reading (i.e., the end in this case)
-- so we got back number of fields + 1
if #fields ~= NUM_HDR_FIELDS + 1 then
-- this should never happen, since we already told file:read() to grab enough bytes
@@ -544,7 +544,7 @@ parse_rec_header = function(funcname, file, file_settings, frame)
local fields = { Struct.unpack(file_settings.rec_hdr_patt, line) }
-- sanity check; also note that Struct.unpack() returns the fields plus
- -- a number of where in the line it stopped reading (ie, the end in this case)
+ -- a number of where in the line it stopped reading (i.e., the end in this case)
-- so we got back number of fields + 1
if #fields ~= file_settings.num_rec_fields + 1 then
dprint(funcname, ": parse_rec_header: failed to read the record header, got:",
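The "number of fields + 1" sanity check in the pcap_file.lua hunks relies on Struct.unpack() returning the unpacked values followed by the position where it stopped reading. A small self-contained illustration, using an assumed two-field format string rather than the script's real header pattern:

-- Struct.unpack() returns the fields plus the position it stopped reading at
local NUM_FIELDS = 2
local patt = "<I4I4"                            -- two little-endian 4-byte unsigned ints (assumed pattern)
local line = Struct.pack(patt, 0xa1b2c3d4, 2)   -- 8 bytes of sample data

local fields = { Struct.unpack(patt, line) }
-- fields[1] and fields[2] hold the two values; fields[3] is the stop position (9)
assert(#fields == NUM_FIELDS + 1)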